Compare commits
235 Commits: pandoc-hac...csh-issue-
| Author | SHA1 | Date |
|---|---|---|
| | a47237ccb7 | |
| | ab82140128 | |
| | 4d81b2ca53 | |
| | c1eca448c5 | |
| | d04df4ed75 | |
| | d9f487efc4 | |
| | 02b5cc8efd | |
| | f3c6756294 | |
| | 3ab407b91a | |
| | 0b40c8f79d | |
| | 882732d6a5 | |
| | d934f96370 | |
| | 0f84c47bbe | |
| | fdd87d77cc | |
| | 7ebc9b43a1 | |
| | 4c4dd64ca6 | |
| | 693b9e6193 | |
| | 65d416ee10 | |
| | 8f70dbd67b | |
| | a62ff6e248 | |
| | 481a49a587 | |
| | 2675fa033e | |
| | dc6af8799f | |
| | 61bed0da2b | |
| | 4f6ef61626 | |
| | ada07ad2c3 | |
| | bc530c70e2 | |
| | db00288bb9 | |
| | 663ec88718 | |
| | 03047f45af | |
| | 11cf8a98fa | |
| | d2c2629ef5 | |
| | adfeffd254 | |
| | bd42fe5776 | |
| | 3200161308 | |
| | 9cb14660d4 | |
| | 31153c479c | |
| | f422bb8011 | |
| | 25c4c349d7 | |
| | e2377e1c1c | |
| | 1899d27732 | |
| | 9bf3795ceb | |
| | d20856f799 | |
| | 12fee9e4df | |
| | ddaa944aa3 | |
| | a194d7ad05 | |
| | 4c8b619ee8 | |
| | 4bd67d5e7e | |
| | c269c308ef | |
| | e12ffdd535 | |
| | 82afad7afc | |
| | 2fceac6ac8 | |
| | d4e9aca9e2 | |
| | 5d2eb129e8 | |
| | b52a8b2aa2 | |
| | 6fbdcd76d0 | |
| | 541dac11cb | |
| | ee7947f54d | |
| | 984474ee11 | |
| | be855805c9 | |
| | 2d023a3b03 | |
| | 1894e8ad5d | |
| | 9507f0f80f | |
| | 19078fe866 | |
| | 38874f9169 | |
| | 855f26c520 | |
| | 8401bbdc26 | |
| | 71181243b3 | |
| | 0b4ae6ef8d | |
| | 747c73fdad | |
| | 1c1610a0bc | |
| | 434e819d23 | |
| | f92e626647 | |
| | 6159aff001 | |
| | 49d5ad711a | |
| | bcdac34a0b | |
| | 25cb54d1d7 | |
| | 75e77a3991 | |
| | 49f3b7d54f | |
| | f1ab938bab | |
| | a18d49562c | |
| | d3039a52f3 | |
| | 7e07b8b4a7 | |
| | 473efdae70 | |
| | 3aa160b0e7 | |
| | 114e4f7043 | |
| | cd0a71caba | |
| | 96d6fb3404 | |
| | 1481b4fe50 | |
| | 3aad472a83 | |
| | 49ddcee0c6 | |
| | 6d1545a40e | |
| | 9ce7bfa8ab | |
| | 7c4c8a9e44 | |
| | 90436933da | |
| | 77abf19f6b | |
| | a781c7f600 | |
| | b07b7a84be | |
| | 58b4de905c | |
| | 5f9fb85613 | |
| | d3bb863d0a | |
| | 00ebc87230 | |
| | 6299832a13 | |
| | 607bb74ffa | |
| | b4107b7391 | |
| | 5074442702 | |
| | 05ddbd3a18 | |
| | 7b773474d9 | |
| | e4d28addf9 | |
| | 171ad0329d | |
| | 834eeffda4 | |
| | 0f179a7c7c | |
| | 1990f20dc0 | |
| | 407c7c235b | |
| | 988f177f79 | |
| | c6f49f04a9 | |
| | a26d7093b4 | |
| | eec0529ef7 | |
| | 382f30f810 | |
| | 95e052d059 | |
| | 9f79229835 | |
| | 95b2e8caae | |
| | 3890cdec37 | |
| | 3e3468d9e9 | |
| | 9ef9a3b780 | |
| | ee518c1755 | |
| | 3a9206f1e7 | |
| | d1ce49d6d7 | |
| | 627bed428e | |
| | 92e1240635 | |
| | 94a52333f7 | |
| | c490479a1a | |
| | f802717cb5 | |
| | 0eeee4284d | |
| | e1c23be845 | |
| | 67d34fdaf0 | |
| | 465dc31e75 | |
| | 2b6032b249 | |
| | 3478c28fa3 | |
| | 3e26efe06f | |
| | fb00098fc0 | |
| | 33092baf90 | |
| | 4830e9f785 | |
| | 368f9b1c5d | |
| | bcb87620b5 | |
| | dd015a05cb | |
| | 8d846f64a9 | |
| | 3545852173 | |
| | 7fc9412141 | |
| | a960614eb4 | |
| | 38bd598eb4 | |
| | 97716365af | |
| | c1277e9f22 | |
| | a75cec7d52 | |
| | 6464aefdb4 | |
| | ec85957ae4 | |
| | 4bfc02ef3b | |
| | 364c8097c8 | |
| | 911e1d58f7 | |
| | dd93d37460 | |
| | 82b996b145 | |
| | b3033c1686 | |
| | 547ef679b4 | |
| | b30890ef99 | |
| | 926667f365 | |
| | 0a70985ba5 | |
| | 4ca8027cb8 | |
| | da216c52e9 | |
| | e6532b5681 | |
| | 85aec72206 | |
| | f000ecb681 | |
| | 436f69f3a6 | |
| | 38e91ab3e4 | |
| | 0b3af7d759 | |
| | 9548615169 | |
| | da814b0e3a | |
| | e544ad9a23 | |
| | 1814f1c997 | |
| | 98a4e60eb7 | |
| | ca23cd42ad | |
| | b330ee2d5b | |
| | b5a7eabaab | |
| | ec75f9fa67 | |
| | dc1ea9d3e9 | |
| | 4d955a8d41 | |
| | 0915ac8c60 | |
| | aeb6f48945 | |
| | 8ccbfc7d32 | |
| | 0bd9377018 | |
| | 3c1d7ab264 | |
| | 3d9a93ad61 | |
| | 17c51c2ba0 | |
| | f4226ecd0e | |
| | 6fbfcfc68b | |
| | 63145cc60c | |
| | 5739a2aeca | |
| | 9f8a68be38 | |
| | 1dce40c61f | |
| | 52982b8fcd | |
| | a741a238a8 | |
| | 0c1b699bd5 | |
| | dc3cb439d0 | |
| | 83c7068bd1 | |
| | b9d94fb428 | |
| | 7dbed15fea | |
| | 3c4870f688 | |
| | 4ff1121353 | |
| | aca9100c52 | |
| | 96a237fb74 | |
| | 4e6514b17e | |
| | 00cf5468d0 | |
| | 177c46df98 | |
| | 2f96350b7c | |
| | f1df41112b | |
| | b202af3cf2 | |
| | 3bdd50a231 | |
| | 3134b8aada | |
| | aa0f9ee0be | |
| | 4dd11f3442 | |
| | ae7357778e | |
| | c6b962a8b9 | |
| | 3de14a3f17 | |
| | 49a35c5e11 | |
| | b9874b5ae5 | |
| | 5cb3bfcbbb | |
| | 1a47887e80 | |
| | 70f016af1f | |
| | b8c22f4d74 | |
| | 8f6db5baff | |
| | d698cc318f | |
| | 12b13d6199 | |
| | a02adacc98 | |
| | a2a8b70b74 | |
| | 017ba3a4ec | |
| | b64106b730 | |
.dockerignore (Normal file, 9 lines)
@@ -0,0 +1,9 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
app.js
**/js/*
.github/ISSUE_TEMPLATE.md (vendored, Normal file, 38 lines)
@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
.github/PULL_REQUEST_TEMPLATE.md (vendored, Normal file, 45 lines)
@@ -0,0 +1,45 @@
<!-- Please review https://github.com/overleaf/write_latex/blob/master/.github/CONTRIBUTING.md for guidance on what is expected in each section. -->

### Description



#### Screenshots



#### Related Issues / PRs



### Review



#### Potential Impact



#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility



### Deployment



#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring



#### Who Needs to Know?
.gitignore (vendored, 5 changed lines)
@@ -7,10 +7,13 @@ test/acceptance/js
test/acceptance/fixtures/tmp
compiles
app.js
**/*.map
.DS_Store
*~
cache
.vagrant
db.sqlite
db.sqlite-wal
db.sqlite-shm
config/*
bin/synctex
npm-debug.log
.viminfo (Normal file, 35 lines)
@@ -0,0 +1,35 @@
# This viminfo file was generated by Vim 7.4.
# You may edit it if you're careful!

# Value of 'encoding' when this file was written
*encoding=latin1


# hlsearch on (H) or off (h):
~h
# Command Line History (newest to oldest):
:x

# Search String History (newest to oldest):

# Expression History (newest to oldest):

# Input Line History (newest to oldest):

# Input Line History (newest to oldest):

# Registers:

# File marks:
'0  1  0  ~/hello

# Jumplist (newest first):
-'  1  0  ~/hello

# History of marks within files (newest to oldest):

> ~/hello
	"	1	0
	^	1	1
	.	1	0
	+	1	0
Dockerfile (Normal file, 27 lines)
@@ -0,0 +1,27 @@
FROM node:10.15.0 as app

WORKDIR /app

#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm install --quiet

COPY . /app


RUN npm run compile:all

FROM node:10.15.0

RUN \
  apt -y update && \
  apt -y install moreutils

COPY --from=app /app /app

WORKDIR /app
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
ENTRYPOINT ["/bin/bash", "entrypoint.sh"]

CMD ["node", "--expose-gc", "app.js"]
Gruntfile.coffee (104 changed lines)
@@ -1,104 +0,0 @@
spawn = require("child_process").spawn

module.exports = (grunt) ->
  grunt.initConfig
    coffee:
      app_src:
        expand: true,
        flatten: true,
        cwd: "app"
        src: ['coffee/*.coffee'],
        dest: 'app/js/',
        ext: '.js'

      app:
        src: "app.coffee"
        dest: "app.js"

      unit_tests:
        expand: true
        cwd: "test/unit/coffee"
        src: ["**/*.coffee"]
        dest: "test/unit/js/"
        ext: ".js"

      acceptance_tests:
        expand: true
        cwd: "test/acceptance/coffee"
        src: ["**/*.coffee"]
        dest: "test/acceptance/js/"
        ext: ".js"

      smoke_tests:
        expand: true
        cwd: "test/smoke/coffee"
        src: ["**/*.coffee"]
        dest: "test/smoke/js"
        ext: ".js"

    clean:
      app: ["app/js/"]
      unit_tests: ["test/unit/js"]
      acceptance_tests: ["test/acceptance/js"]
      smoke_tests: ["test/smoke/js"]

    execute:
      app:
        src: "app.js"

    mkdir:
      all:
        options:
          create: ["cache", "compiles"]

    mochaTest:
      unit:
        options:
          reporter: "spec"
          grep: grunt.option("grep")
        src: ["test/unit/js/**/*.js"]
      acceptance:
        options:
          reporter: "spec"
          timeout: 40000
          grep: grunt.option("grep")
        src: ["test/acceptance/js/**/*.js"]
      smoke:
        options:
          reported: "spec"
          timeout: 10000
        src: ["test/smoke/js/**/*.js"]

  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-clean'
  grunt.loadNpmTasks 'grunt-mocha-test'
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-mkdir'

  grunt.registerTask 'compile:bin', () ->
    callback = @async()
    proc = spawn "cc", [
      "-o", "bin/synctex", "-Isrc/synctex",
      "src/synctex.c", "src/synctex/synctex_parser.c", "src/synctex/synctex_parser_utils.c", "-lz"
    ], stdio: "inherit"
    proc.on "close", callback

  grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src', 'coffee:smoke_tests', 'compile:bin']
  grunt.registerTask 'run', ['compile:app', 'bunyan', 'execute']

  grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
  grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']

  grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
  grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']

  grunt.registerTask 'compile:smoke_tests', ['clean:smoke_tests', 'coffee:smoke_tests']
  grunt.registerTask 'test:smoke', ['compile:smoke_tests', 'mochaTest:smoke']

  grunt.registerTask 'install', 'compile:app'

  grunt.registerTask 'default', ['mkdir', 'run']
Jenkinsfile (vendored, 118 changed lines)
@@ -1,79 +1,75 @@
pipeline {
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""

pipeline {
  agent any

  environment {
    GIT_PROJECT = "clsi"
    JENKINS_WORKFLOW = "clsi-sharelatex"
    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
    GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
  }

  triggers {
    pollSCM('* * * * *')
    cron('@daily')
    cron(cron_string)
  }

  stages {
    stage('Clean') {
      steps {
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -fr node_modules'
      }
    }
    stage('Install') {
      agent {
        docker {
          image 'node:6.11.2'
          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
          reuseNode true
        }
      }
      steps {
        sh 'git config --global core.logallrefupdates false'
        sh 'rm -fr node_modules'
        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: '_docker-runner'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/docker-runner-sharelatex']]])
        sh 'npm install ./_docker-runner'
        sh 'rm -fr ./_docker-runner ./_docker-runner@tmp'
        sh 'npm install'
        sh 'npm rebuild'
        sh 'npm install --quiet grunt-cli'
        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
          sh "curl $GIT_API_URL \
            --data '{ \
              \"state\" : \"pending\", \
              \"target_url\": \"$TARGET_URL\", \
              \"description\": \"Your build is underway\", \
              \"context\": \"ci/jenkins\" }' \
            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
        }
      }
    stage('Compile and Test') {
      agent {
        docker {
          image 'node:6.11.2'
          reuseNode true
        }
      }

    stage('Build') {
      steps {
        sh 'node_modules/.bin/grunt compile:app'
        sh 'node_modules/.bin/grunt compile:acceptance_tests'
        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
        sh 'make build'
      }
    }

    stage('Unit Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
      }
    }

    stage('Acceptance Tests') {
      environment {
        TEXLIVE_IMAGE="quay.io/sharelatex/texlive-full:2017.1"
      }
      steps {
        sh 'mkdir -p compiles cache'
        // Not yet running, due to volumes/sibling containers
        sh 'docker container prune -f'
        sh 'docker pull $TEXLIVE_IMAGE'
        sh 'docker pull sharelatex/acceptance-test-runner:clsi-6.11.2'
        sh 'docker run --rm -e SIBLING_CONTAINER_USER=root -e SANDBOXED_COMPILES_HOST_DIR=$(pwd)/compiles -e SANDBOXED_COMPILES_SIBLING_CONTAINERS=true -e TEXLIVE_IMAGE=$TEXLIVE_IMAGE -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd):/app sharelatex/acceptance-test-runner:clsi-6.11.2'
        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
        sh 'rm -r compiles cache server.log db.sqlite config/settings.defaults.coffee'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
      }
    }
    stage('Package') {

    stage('Package and docker push') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'

        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
        }
        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
        sh 'docker logout https://gcr.io/overleaf-ops'

      }
    }
    stage('Publish') {

    stage('Publish to s3') {
      steps {
        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
        }
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
@@ -82,11 +78,37 @@ pipeline {
  }

  post {
    always {
      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
      sh 'make clean'
    }

    success {
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
            \"state\" : \"success\", \
            \"target_url\": \"$TARGET_URL\", \
            \"description\": \"Your build succeeded!\", \
            \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }

    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
            \"state\" : \"failure\", \
            \"target_url\": \"$TARGET_URL\", \
            \"description\": \"Your build failed\", \
            \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }
  }
Makefile (Normal file, 51 lines)
@@ -0,0 +1,51 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = clsi
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

clean:
	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	rm -f app.js
	rm -rf app/js
	rm -rf test/unit/js
	rm -rf test/acceptance/js

test: test_unit test_acceptance

test_unit:
	@[ ! -d test/unit ] && echo "clsi has no unit tests" || $(DOCKER_COMPOSE) run --rm test_unit

test_acceptance: test_clean test_acceptance_pre_run test_acceptance_run

test_acceptance_run:
	@[ ! -d test/acceptance ] && echo "clsi has no acceptance tests" || $(DOCKER_COMPOSE) run --rm test_acceptance

test_clean:
	$(DOCKER_COMPOSE) down -v -t 0

test_acceptance_pre_run:
	@[ ! -f test/acceptance/js/scripts/pre-run ] && echo "clsi has no pre acceptance tests task" || $(DOCKER_COMPOSE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:
	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
README.md (32 changed lines)
@@ -1,16 +1,38 @@
clsi-sharelatex
overleaf/clsi
===============

A web api for compiling LaTeX documents in the cloud

[Build Status](https://travis-ci.org/sharelatex/clsi-sharelatex)
The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:

* TCP/3009 - the RESTful interface
* TCP/3048 - reports load information
* TCP/3049 - HTTP interface to control the CLSI service

These defaults can be modified in `config/settings.defaults.coffee`.

The provided `Dockerfile` builds a docker image which has the docker command line tools installed. The configuration in `docker-compose-config.yml` mounts the docker socket, in order that the CLSI container can talk to the docker host it is running in. This allows it to spin up `sibling containers` running an image with a TeX distribution installed to perform the actual compiles.

The CLSI can be configured through the following environment variables:

* `DOCKER_RUNNER` - Set to true to use sibling containers
* `SYNCTEX_BIN_HOST_PATH` - Path to SyncTeX binary
* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
* `SQLITE_PATH` - Path to SQLite database
* `TEXLIVE_IMAGE` - The TEXLIVE docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TEXLIVE image. Defaults to `tex`
* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the docker image e.g. `gcr.io/overleaf-ops`
* `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g. `http://$FILESTORE_HOST:3009`
* `STATSD_HOST` - The address of the Statsd service (used by the metrics module)
* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
* `SMOKE_TEST` - Whether to run smoke tests

Installation
------------

The CLSI can be installed and set up as part of the entire [ShareLaTeX stack](https://github.com/sharelatex/sharelatex) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first checkout this repository:
The CLSI can be installed and set up as part of the entire [Overleaf stack](https://github.com/overleaf/overleaf) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first checkout this repository:

    $ git clone git@github.com:sharelatex/clsi-sharelatex.git
    $ git clone git@github.com:overleaf/clsi.git

Then install the required npm modules:

@@ -92,4 +114,4 @@ License

The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.

Copyright (c) ShareLaTeX, 2014.
Copyright (c) Overleaf, 2014-2019.
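Reviewer's aside: the README describes the RESTful interface but this diff never shows a full request, so here is a minimal sketch of posting a compile, assuming a hypothetical project id and a guessed request body shape. The route and the 3013 default port come from app.coffee below (the README lists 3009); everything else in the body is an assumption, not part of this diff.

    # Illustration only, not part of this diff. The body shape
    # (compile.options, compile.resources) and the "demo-project" id are
    # assumptions; only the route and default port appear in app.coffee.
    http = require "http"

    body = JSON.stringify
      compile:
        options: {compiler: "pdflatex", timeout: 40}
        rootResourcePath: "main.tex"
        resources: [
          {path: "main.tex", content: "\\documentclass{article}\\begin{document}Hello\\end{document}"}
        ]

    options =
      hostname: "localhost"
      port: 3013
      path: "/project/demo-project/compile"
      method: "POST"
      headers: {"Content-Type": "application/json"}

    req = http.request options, (res) ->
      data = ""
      res.on "data", (chunk) -> data += chunk
      res.on "end", -> console.log res.statusCode, data

    req.write body
    req.end()

Judging from the controller changes below, the JSON response reports a compile status and the generated output files, but the exact shape is not spelled out in this diff.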
app.coffee (89 changed lines)
@@ -1,3 +1,6 @@
Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")

CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
@@ -12,8 +15,7 @@ Errors = require './app/js/Errors'
Path = require "path"
fs = require "fs"

Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")

Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)

@@ -26,15 +28,17 @@ express = require "express"
bodyParser = require "body-parser"
app = express()

Metrics.injectMetricsRoute(app)
app.use Metrics.http.monitor(logger)

# Compile requests can take longer than the default two
# minutes (including file download time), so bump up the
# timeout a bit.
TIMEOUT = 6 * 60 * 1000
TIMEOUT = 10 * 60 * 1000
app.use (req, res, next) ->
  req.setTimeout TIMEOUT
  res.setTimeout TIMEOUT
  res.removeHeader("X-Powered-By")
  next()

app.param 'project_id', (req, res, next, project_id) ->
@@ -56,7 +60,7 @@ app.param 'build_id', (req, res, next, build_id) ->
    next new Error("invalid build id #{build_id}")


app.post "/project/:project_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id", CompileController.clearCache

@@ -66,7 +70,7 @@ app.get "/project/:project_id/wordcount", CompileController.wordcount
app.get "/project/:project_id/status", CompileController.status

# Per-user containers
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: Settings.compileSizeLimit), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id/user/:user_id", CompileController.clearCache

@@ -139,7 +143,10 @@ app.get "/health_check", (req, res)->
  res.contentType(resCacher?.setContentType)
  res.status(resCacher?.code).send(resCacher?.body)

profiler = require "v8-profiler"
app.get "/smoke_test_force", (req, res)->
  smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))(req, res)

profiler = require "v8-profiler-node8"
app.get "/profile", (req, res) ->
  time = parseInt(req.query.time || "1000")
  profiler.startProfiling("test")
@@ -160,9 +167,77 @@ app.use (error, req, res, next) ->
  logger.error {err: error, url: req.url}, "server error"
  res.sendStatus(error?.statusCode || 500)

app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
net = require "net"
os = require "os"

STATE = "up"


loadTcpServer = net.createServer (socket) ->
  socket.on "error", (err)->
    if err.code == "ECONNRESET"
      # this always comes up, we don't know why
      return
    logger.err err:err, "error with socket on load check"
    socket.destroy()

  if STATE == "up" and Settings.internal.load_balancer_agent.report_load
    currentLoad = os.loadavg()[0]

    # staging clsi's have 1 cpu core only
    if os.cpus().length == 1
      availableWorkingCpus = 1
    else
      availableWorkingCpus = os.cpus().length - 1

    freeLoad = availableWorkingCpus - currentLoad
    freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
    if freeLoadPercentage <= 0
      freeLoadPercentage = 1 # when it is 0 the server is set to drain and will move projects to different servers
    socket.write("up, #{freeLoadPercentage}%\n", "ASCII")
    socket.end()
  else
    socket.write("#{STATE}\n", "ASCII")
    socket.end()

loadHttpServer = express()

loadHttpServer.post "/state/up", (req, res, next) ->
  STATE = "up"
  logger.info "getting message to set server to up"
  res.sendStatus 204

loadHttpServer.post "/state/down", (req, res, next) ->
  STATE = "down"
  logger.info "getting message to set server to down"
  res.sendStatus 204

loadHttpServer.post "/state/maint", (req, res, next) ->
  STATE = "maint"
  logger.info "getting message to set server to maint"
  res.sendStatus 204


port = (Settings.internal?.clsi?.port or 3013)
host = (Settings.internal?.clsi?.host or "localhost")

load_tcp_port = Settings.internal.load_balancer_agent.load_port
load_http_port = Settings.internal.load_balancer_agent.local_port

if !module.parent # Called directly
  app.listen port, host, (error) ->
    logger.info "CLSI starting up, listening on #{host}:#{port}"

  loadTcpServer.listen load_tcp_port, host, (error) ->
    throw error if error?
    logger.info "Load tcp agent listening on load port #{load_tcp_port}"

  loadHttpServer.listen load_http_port, host, (error) ->
    throw error if error?
    logger.info "Load http agent listening on load port #{load_http_port}"

module.exports = app

setInterval () ->
  ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
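Reviewer's aside: the load agent above speaks a one-line plain-text protocol over TCP, writing either "up, N%" or the bare state. A minimal client sketch follows; port 3048 is an assumption taken from the README's port list, while the real value comes from Settings.internal.load_balancer_agent.load_port.

    # Illustration only, not part of this diff: read the single status
    # line the load TCP agent writes, e.g. "up, 87%" or "maint".
    net = require "net"

    client = net.connect 3048, "localhost", ->
      console.log "connected to load agent"
    client.on "data", (chunk) ->
      console.log "load agent says:", chunk.toString().trim()
    client.on "end", ->
      console.log "load agent closed the connection"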
@@ -1,44 +1,11 @@
spawn = require("child_process").spawn
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"

logger.info "using standard command runner"

module.exports = CommandRunner =
  run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
    command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
    logger.log project_id: project_id, command: command, directory: directory, "running command"
    logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

    # merge environment settings
    env = {}
    env[key] = value for key, value of process.env
    env[key] = value for key, value of environment

    # run command as detached process so it has its own process group (which can be killed if needed)
    proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory, detached: true, env: env

    proc.on "error", (err)->
      logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
      callback(err)

    proc.on "close", (code, signal) ->
      logger.info code:code, signal:signal, project_id:project_id, "command exited"
      if signal is 'SIGTERM' # signal from kill method below
        err = new Error("terminated")
        err.terminated = true
        return callback(err)
      else if code is 1 # exit status from chktex
        err = new Error("exited")
        err.code = code
        return callback(err)
if Settings.clsi?.dockerRunner == true
  commandRunnerPath = "./DockerRunner"
else
      callback()
  commandRunnerPath = "./LocalCommandRunner"
logger.info commandRunnerPath:commandRunnerPath, "selecting command runner for clsi"
CommandRunner = require(commandRunnerPath)

    return proc.pid # return process id to allow job to be killed if necessary

  kill: (pid, callback = (error) ->) ->
    try
      process.kill -pid # kill all processes in group
    catch err
      return callback(err)
    callback()
module.exports = CommandRunner
@@ -33,12 +33,17 @@ module.exports = CompileController =
      else
        status = "error"
        code = 500
        logger.error err: error, project_id: request.project_id, "error running compile"
        logger.warn err: error, project_id: request.project_id, "error running compile"

    else
      status = "failure"
      for file in outputFiles
        if file.path?.match(/output\.pdf$/)
          status = "success"

      if status == "failure"
        logger.warn project_id: request.project_id, outputFiles:outputFiles, "project failed to compile successfully, no output.pdf generated"

      # log an error if any core files are found
      for file in outputFiles
        if file.path is "core"
@@ -77,10 +82,9 @@ module.exports = CompileController =
    column = parseInt(req.query.column, 10)
    project_id = req.params.project_id
    user_id = req.params.user_id

    CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
      return next(error) if error?
      res.send JSON.stringify {
      res.json {
        pdf: pdfPositions
      }

@@ -90,10 +94,9 @@ module.exports = CompileController =
    v = parseFloat(req.query.v)
    project_id = req.params.project_id
    user_id = req.params.user_id

    CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
      return next(error) if error?
      res.send JSON.stringify {
      res.json {
        code: codePositions
      }

@@ -106,7 +109,7 @@ module.exports = CompileController =

    CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
      return next(error) if error?
      res.send JSON.stringify {
      res.json {
        texcount: result
      }
@@ -15,10 +15,7 @@ fse = require "fs-extra"
os = require("os")
async = require "async"
Errors = require './Errors'

commandRunner = Settings.clsi?.commandRunner or "./CommandRunner"
logger.info commandRunner:commandRunner, "selecting command runner for clsi"
CommandRunner = require(commandRunner)
CommandRunner = require "./CommandRunner"

getCompileName = (project_id, user_id) ->
  if user_id? then "#{project_id}-#{user_id}" else project_id
@@ -41,7 +38,6 @@ module.exports = CompileManager =

  doCompile: (request, callback = (error, outputFiles) ->) ->
    compileDir = getCompileDir(request.project_id, request.user_id)

    timer = new Metrics.Timer("write-to-disk")
    logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
    ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
@@ -62,9 +58,9 @@ module.exports = CompileManager =
        callback()

      createTikzFileIfRequired = (callback) ->
        TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, usesTikzExternalize) ->
        TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, needsMainFile) ->
          return callback(error) if error?
          if usesTikzExternalize
          if needsMainFile
            TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
          else
            callback()
@@ -97,6 +93,7 @@ module.exports = CompileManager =
          compiler: request.compiler
          timeout: request.timeout
          image: request.imageName
          flags: request.flags
          environment: env
        }, (error, output, stats, timings) ->
          # request was for validation only
@@ -205,18 +202,28 @@ module.exports = CompileManager =
    base_dir = Settings.path.synctexBaseDir(compileName)
    file_path = base_dir + "/" + file_name
    compileDir = getCompileDir(project_id, user_id)
    synctex_path = Path.join(compileDir, "output.pdf")
    CompileManager._runSynctex ["code", synctex_path, file_path, line, column], (error, stdout) ->
    synctex_path = "#{base_dir}/output.pdf"
    command = ["code", synctex_path, file_path, line, column]
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
        return callback(error)
      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
        return callback(error) if error?
        logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
        logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, command:command, stdout: stdout, "synctex code output"
        callback null, CompileManager._parseSynctexFromCodeOutput(stdout)

  syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
    compileName = getCompileName(project_id, user_id)
    base_dir = Settings.path.synctexBaseDir(compileName)
    compileDir = getCompileDir(project_id, user_id)
    synctex_path = Path.join(compileDir, "output.pdf")
    CompileManager._runSynctex ["pdf", synctex_path, page, h, v], (error, stdout) ->
    base_dir = Settings.path.synctexBaseDir(compileName)
    synctex_path = "#{base_dir}/output.pdf"
    command = ["pdf", synctex_path, page, h, v]
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync to code"
        return callback(error)
      CompileManager._runSynctex project_id, user_id, command, (error, stdout) ->
        return callback(error) if error?
        logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
        callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
@@ -235,19 +242,19 @@ module.exports = CompileManager =
    return callback(new Error("not a file")) if not stats?.isFile()
    callback()

  _runSynctex: (args, callback = (error, stdout) ->) ->
    bin_path = Path.resolve(__dirname + "/../../bin/synctex")
  _runSynctex: (project_id, user_id, command, callback = (error, stdout) ->) ->
    seconds = 1000
    outputFilePath = args[1]
    CompileManager._checkFileExists outputFilePath, (error) ->
      return callback(error) if error?
      if Settings.clsi?.synctexCommandWrapper?
        [bin_path, args] = Settings.clsi?.synctexCommandWrapper bin_path, args
      child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->

    command.unshift("/opt/synctex")

    directory = getCompileDir(project_id, user_id)
    timeout = 60 * 1000 # increased to allow for large projects
    compileName = getCompileName(project_id, user_id)
    CommandRunner.run compileName, command, directory, Settings.clsi?.docker.image, timeout, {}, (error, output) ->
      if error?
        logger.err err:error, args:args, "error running synctex"
        logger.err err:error, command:command, project_id:project_id, user_id:user_id, "error running synctex"
        return callback(error)
      callback(null, stdout)
      callback(null, output.stdout)
@@ -276,19 +283,24 @@ module.exports = CompileManager =
    }
    return results


  wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
    logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
    file_path = "$COMPILE_DIR/" + file_name
    command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
    directory = getCompileDir(project_id, user_id)
    timeout = 10 * 1000
    compileDir = getCompileDir(project_id, user_id)
    timeout = 60 * 1000
    compileName = getCompileName(project_id, user_id)

    CommandRunner.run compileName, command, directory, image, timeout, {}, (error) ->
    fse.ensureDir compileDir, (error) ->
      if error?
        logger.err {error, project_id, user_id, file_name}, "error ensuring dir for sync from code"
        return callback(error)
      CommandRunner.run compileName, command, compileDir, image, timeout, {}, (error) ->
        return callback(error) if error?
        fs.readFile directory + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
        fs.readFile compileDir + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
          if err?
            logger.err err:err, command:command, directory:directory, project_id:project_id, user_id:user_id, "error reading word count output"
            #call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
            logger.err node_err:err, command:command, compileDir:compileDir, project_id:project_id, user_id:user_id, "error reading word count output"
            return callback(err)
          results = CompileManager._parseWordcountFromOutput(stdout)
          logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
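Reviewer's aside on the new synctex flow above: syncFromCode now ensures the compile directory exists, then runs the synctex binary through CommandRunner instead of exec-ing it directly. A hedged usage sketch, with made-up ids and file name:

    # Illustration only, not part of this diff: the ids, file name, line
    # and column below are hypothetical values.
    CompileManager = require "./app/js/CompileManager"

    CompileManager.syncFromCode "project-id", "user-id", "main.tex", 10, 3, (error, pdfPositions) ->
      throw error if error?
      # each position locates a page and rectangle in output.pdf
      console.log pdfPositions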
app/coffee/DbQueue.coffee (Normal file, 13 lines)
@@ -0,0 +1,13 @@
async = require "async"
Settings = require "settings-sharelatex"
logger = require("logger-sharelatex")
queue = async.queue((task, cb)->
  task(cb)
, Settings.parallelSqlQueryLimit)

queue.drain = ()->
  logger.debug('all items have been processed')

module.exports =
  queue: queue
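Reviewer's note on DbQueue: async.queue caps concurrent SQL work at Settings.parallelSqlQueryLimit tasks. A minimal usage sketch (the task body is hypothetical):

    # Illustration only, not part of this diff: push a task onto the shared
    # queue; async.queue hands each task a `cb` that must be called once
    # when the work is done.
    DbQueue = require "./app/js/DbQueue"

    DbQueue.queue.push (cb) ->
      # run a SQL query here, then signal completion
      cb()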
app/coffee/DockerLockManager.coffee (Normal file, 56 lines)
@@ -0,0 +1,56 @@
logger = require "logger-sharelatex"

LockState = {} # locks for docker container operations, by container name

module.exports = LockManager =

  MAX_LOCK_HOLD_TIME: 15000 # how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000 # how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000 # retry time

  tryLock: (key, callback = (err, gotLock) ->) ->
    existingLock = LockState[key]
    if existingLock? # the lock is already taken, check how old it is
      lockAge = Date.now() - existingLock.created
      if lockAge < LockManager.MAX_LOCK_HOLD_TIME
        return callback(null, false) # we didn't get the lock, bail out
      else
        logger.error {key: key, lock: existingLock, age:lockAge}, "taking old lock by force"
    # take the lock
    LockState[key] = lockValue = {created: Date.now()}
    callback(null, true, lockValue)

  getLock: (key, callback = (error, lockValue) ->) ->
    startTime = Date.now()
    do attempt = () ->
      LockManager.tryLock key, (error, gotLock, lockValue) ->
        return callback(error) if error?
        if gotLock
          callback(null, lockValue)
        else if Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME
          e = new Error("Lock timeout")
          e.key = key
          return callback(e)
        else
          setTimeout attempt, LockManager.LOCK_TEST_INTERVAL

  releaseLock: (key, lockValue, callback = (error) ->) ->
    existingLock = LockState[key]
    if existingLock is lockValue # lockValue is an object, so we can test by reference
      delete LockState[key] # our lock, so we can free it
      callback()
    else if existingLock? # lock exists but doesn't match ours
      logger.error {key:key, lock: existingLock}, "tried to release lock taken by force"
      callback()
    else
      logger.error {key:key, lock: existingLock}, "tried to release lock that has gone"
      callback()

  runWithLock: (key, runner = ( (releaseLock = (error) ->) -> ), callback = ( (error) -> )) ->
    LockManager.getLock key, (error, lockValue) ->
      return callback(error) if error?
      runner (error1, args...) ->
        LockManager.releaseLock key, lockValue, (error2) ->
          error = error1 or error2
          return callback(error) if error?
          callback(null, args...)
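Sketch of the runWithLock contract above, to make the callback plumbing explicit: the runner receives a release function, and any extra arguments passed to it are forwarded to the final callback after the lock is released. The key below is hypothetical.

    # Illustration only, not part of this diff.
    LockManager = require "./app/js/DockerLockManager"

    LockManager.runWithLock "project-1234-cafe01", (releaseLock) ->
      # ...perform the docker operation while holding the lock...
      releaseLock(null, "some result") # (error, results...)
    , (error, result) ->
      throw error if error?
      console.log "lock released, got", result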
358
app/coffee/DockerRunner.coffee
Normal file
358
app/coffee/DockerRunner.coffee
Normal file
@@ -0,0 +1,358 @@
|
||||
Settings = require "settings-sharelatex"
|
||||
logger = require "logger-sharelatex"
|
||||
Docker = require("dockerode")
|
||||
dockerode = new Docker()
|
||||
crypto = require "crypto"
|
||||
async = require "async"
|
||||
LockManager = require "./DockerLockManager"
|
||||
fs = require "fs"
|
||||
Path = require 'path'
|
||||
_ = require "underscore"
|
||||
|
||||
logger.info "using docker runner"
|
||||
|
||||
usingSiblingContainers = () ->
|
||||
Settings?.path?.sandboxedCompilesHostDir?
|
||||
|
||||
module.exports = DockerRunner =
|
||||
ERR_NOT_DIRECTORY: new Error("not a directory")
|
||||
ERR_TERMINATED: new Error("terminated")
|
||||
ERR_EXITED: new Error("exited")
|
||||
ERR_TIMED_OUT: new Error("container timed out")
|
||||
|
||||
run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) ->
|
||||
|
||||
if usingSiblingContainers()
|
||||
_newPath = Settings.path.sandboxedCompilesHostDir
|
||||
logger.log {path: _newPath}, "altering bind path for sibling containers"
|
||||
# Server Pro, example:
|
||||
# '/var/lib/sharelatex/data/compiles/<project-id>'
|
||||
# ... becomes ...
|
||||
# '/opt/sharelatex_data/data/compiles/<project-id>'
|
||||
directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory))
|
||||
|
||||
volumes = {}
|
||||
volumes[directory] = "/compile"
|
||||
|
||||
command = (arg.toString().replace?('$COMPILE_DIR', "/compile") for arg in command)
|
||||
if !image?
|
||||
image = Settings.clsi.docker.image
|
||||
|
||||
if Settings.texliveImageNameOveride?
|
||||
img = image.split("/")
|
||||
image = "#{Settings.texliveImageNameOveride}/#{img[2]}"
|
||||
|
||||
options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment)
|
||||
fingerprint = DockerRunner._fingerprintContainer(options)
|
||||
options.name = name = "project-#{project_id}-#{fingerprint}"
|
||||
|
||||
# logOptions = _.clone(options)
|
||||
# logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
|
||||
logger.log project_id: project_id, "running docker container"
|
||||
DockerRunner._runAndWaitForContainer options, volumes, timeout, (error, output) ->
|
||||
if error?.message?.match("HTTP code is 500")
|
||||
logger.log err: error, project_id: project_id, "error running container so destroying and retrying"
|
||||
DockerRunner.destroyContainer name, null, true, (error) ->
|
||||
return callback(error) if error?
|
||||
DockerRunner._runAndWaitForContainer options, volumes, timeout, callback
|
||||
else
|
||||
callback(error, output)
|
||||
|
||||
return name # pass back the container name to allow it to be killed
|
||||
|
||||
kill: (container_id, callback = (error) ->) ->
|
||||
logger.log container_id: container_id, "sending kill signal to container"
|
||||
container = dockerode.getContainer(container_id)
|
||||
container.kill (error) ->
|
||||
if error? and error?.message?.match?(/Cannot kill container .* is not running/)
|
||||
logger.warn err: error, container_id: container_id, "container not running, continuing"
|
||||
error = null
|
||||
if error?
|
||||
logger.error err: error, container_id: container_id, "error killing container"
|
||||
return callback(error)
|
||||
else
|
||||
callback()
|
||||
|
||||
_runAndWaitForContainer: (options, volumes, timeout, _callback = (error, output) ->) ->
|
||||
callback = (args...) ->
|
||||
_callback(args...)
|
||||
# Only call the callback once
|
||||
_callback = () ->
|
||||
|
||||
name = options.name
|
||||
|
||||
streamEnded = false
|
||||
containerReturned = false
|
||||
output = {}
|
||||
|
||||
callbackIfFinished = () ->
|
||||
if streamEnded and containerReturned
|
||||
callback(null, output)
|
||||
|
||||
attachStreamHandler = (error, _output) ->
|
||||
return callback(error) if error?
|
||||
output = _output
|
||||
streamEnded = true
|
||||
callbackIfFinished()
|
||||
|
||||
DockerRunner.startContainer options, volumes, attachStreamHandler, (error, containerId) ->
|
||||
return callback(error) if error?
|
||||
|
||||
DockerRunner.waitForContainer name, timeout, (error, exitCode) ->
|
||||
return callback(error) if error?
|
||||
if exitCode is 137 # exit status from kill -9
|
||||
err = DockerRunner.ERR_TERMINATED
|
||||
err.terminated = true
|
||||
return callback(err)
|
||||
if exitCode is 1 # exit status from chktex
|
||||
err = DockerRunner.ERR_EXITED
|
||||
err.code = exitCode
|
||||
return callback(err)
|
||||
containerReturned = true
|
||||
options?.HostConfig?.SecurityOpt = null #small log line
|
||||
logger.log err:err, exitCode:exitCode, options:options, "docker container has exited"
|
||||
callbackIfFinished()
|
||||
|
||||
_getContainerOptions: (command, image, volumes, timeout, environment) ->
|
||||
timeoutInSeconds = timeout / 1000
|
||||
|
||||
dockerVolumes = {}
|
||||
for hostVol, dockerVol of volumes
|
||||
dockerVolumes[dockerVol] = {}
|
||||
|
||||
if volumes[hostVol].slice(-3).indexOf(":r") == -1
|
||||
volumes[hostVol] = "#{dockerVol}:rw"
|
||||
|
||||
# merge settings and environment parameter
|
||||
env = {}
|
||||
for src in [Settings.clsi.docker.env, environment or {}]
|
||||
env[key] = value for key, value of src
|
||||
# set the path based on the image year
|
||||
if m = image.match /:([0-9]+)\.[0-9]+/
|
||||
year = m[1]
|
||||
else
|
||||
year = "2014"
|
||||
env['PATH'] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/#{year}/bin/x86_64-linux/"
|
||||
options =
|
||||
"Cmd" : command,
|
||||
"Image" : image
|
||||
"Volumes" : dockerVolumes
|
||||
"WorkingDir" : "/compile"
|
||||
"NetworkDisabled" : true
|
||||
"Memory" : 1024 * 1024 * 1024 * 1024 # 1 Gb
|
||||
"User" : Settings.clsi.docker.user
|
||||
"Env" : ("#{key}=#{value}" for key, value of env) # convert the environment hash to an array
|
||||
"HostConfig" :
|
||||
"Binds": ("#{hostVol}:#{dockerVol}" for hostVol, dockerVol of volumes)
|
||||
"LogConfig": {"Type": "none", "Config": {}}
|
||||
"Ulimits": [{'Name': 'cpu', 'Soft': timeoutInSeconds+5, 'Hard': timeoutInSeconds+10}]
|
||||
"CapDrop": "ALL"
|
||||
"SecurityOpt": ["no-new-privileges"]
|
||||
|
||||
|
||||
if Settings.path?.synctexBinHostPath?
|
||||
options["HostConfig"]["Binds"].push("#{Settings.path.synctexBinHostPath}:/opt/synctex:ro")
|
||||
|
||||
if Settings.clsi.docker.seccomp_profile?
|
||||
options.HostConfig.SecurityOpt.push "seccomp=#{Settings.clsi.docker.seccomp_profile}"
|
||||
|
||||
return options
|
||||
|
||||
_fingerprintContainer: (containerOptions) ->
|
||||
# Yay, Hashing!
|
||||
json = JSON.stringify(containerOptions)
|
||||
return crypto.createHash("md5").update(json).digest("hex")
|
||||
|
||||
startContainer: (options, volumes, attachStreamHandler, callback) ->
|
||||
LockManager.runWithLock options.name, (releaseLock) ->
|
||||
# Check that volumes exist before starting the container.
|
||||
# When a container is started with volume pointing to a
|
||||
# non-existent directory then docker creates the directory but
|
||||
# with root ownership.
|
||||
DockerRunner._checkVolumes options, volumes, (err) ->
|
||||
return releaseLock(err) if err?
|
||||
DockerRunner._startContainer options, volumes, attachStreamHandler, releaseLock
|
||||
, callback
|
||||
|
||||
# Check that volumes exist and are directories
|
||||
_checkVolumes: (options, volumes, callback = (error, containerName) ->) ->
|
||||
if usingSiblingContainers()
|
||||
# Server Pro, with sibling-containers active, skip checks
|
||||
return callback(null)
|
||||
|
||||
checkVolume = (path, cb) ->
|
||||
fs.stat path, (err, stats) ->
|
||||
return cb(err) if err?
|
||||
return cb(DockerRunner.ERR_NOT_DIRECTORY) if not stats?.isDirectory()
|
||||
cb()
|
||||
jobs = []
|
||||
for vol of volumes
|
||||
do (vol) ->
|
||||
jobs.push (cb) -> checkVolume(vol, cb)
|
||||
async.series jobs, callback
|
||||
|
||||
_startContainer: (options, volumes, attachStreamHandler, callback = ((error, output) ->)) ->
|
||||
callback = _.once(callback)
|
||||
name = options.name
|
||||
|
||||
logger.log {container_name: name}, "starting container"
|
||||
container = dockerode.getContainer(name)
|
||||
|
||||
createAndStartContainer = ->
|
||||
dockerode.createContainer options, (error, container) ->
|
||||
return callback(error) if error?
|
||||
startExistingContainer()
|
||||
|
||||
startExistingContainer = ->
|
||||
DockerRunner.attachToContainer options.name, attachStreamHandler, (error)->
|
||||
return callback(error) if error?
|
||||
container.start (error) ->
|
||||
if error? and error?.statusCode != 304 #already running
|
||||
return callback(error)
|
||||
else
|
||||
callback()
|
||||
|
||||
container.inspect (error, stats)->
|
||||
if error?.statusCode == 404
|
||||
createAndStartContainer()
|
||||
else if error?
|
||||
logger.err {container_name: name, error:error}, "unable to inspect container to start"
|
||||
return callback(error)
|
||||
else
|
||||
startExistingContainer()
|
||||
|
||||
|
||||
attachToContainer: (containerId, attachStreamHandler, attachStartCallback) ->
|
||||
container = dockerode.getContainer(containerId)
|
||||
container.attach {stdout: 1, stderr: 1, stream: 1}, (error, stream) ->
|
||||
if error?
|
||||
logger.error err: error, container_id: containerId, "error attaching to container"
|
||||
return attachStartCallback(error)
|
||||
else
|
||||
attachStartCallback()
|
||||
|
||||
|
||||
logger.log container_id: containerId, "attached to container"
|
||||
|
||||
MAX_OUTPUT = 1024 * 1024 # limit output to 1MB
|
||||
createStringOutputStream = (name) ->
|
||||
return {
|
||||
data: ""
|
||||
overflowed: false
|
||||
write: (data) ->
|
||||
return if @overflowed
|
||||
if @data.length < MAX_OUTPUT
|
||||
@data += data
|
||||
else
|
||||
logger.error container_id: containerId, length: @data.length, maxLen: MAX_OUTPUT, "#{name} exceeds max size"
|
||||
@data += "(...truncated at #{MAX_OUTPUT} chars...)"
|
||||
@overflowed = true
|
||||
# kill container if too much output
|
||||
# docker.containers.kill(containerId, () ->)
|
||||
}
|
||||
|
||||
stdout = createStringOutputStream "stdout"
|
||||
stderr = createStringOutputStream "stderr"
|
||||
|
||||
container.modem.demuxStream(stream, stdout, stderr)
|
||||
|
||||
stream.on "error", (err) ->
|
||||
logger.error err: err, container_id: containerId, "error reading from container stream"
|
||||
|
||||
stream.on "end", () ->
|
||||
attachStreamHandler null, {stdout: stdout.data, stderr: stderr.data}
|
||||
|
||||
waitForContainer: (containerId, timeout, _callback = (error, exitCode) ->) ->
|
||||
callback = (args...) ->
|
||||
_callback(args...)
|
||||
# Only call the callback once
|
||||
_callback = () ->
|
||||
|
||||
container = dockerode.getContainer(containerId)
|
||||
|
||||
timedOut = false
|
||||
timeoutId = setTimeout () ->
|
||||
timedOut = true
|
||||
logger.log container_id: containerId, "timeout reached, killing container"
|
||||
container.kill(() ->)
|
||||
, timeout
|
||||
|
||||
logger.log container_id: containerId, "waiting for docker container"
|
||||
container.wait (error, res) ->
|
||||
if error?
|
||||
clearTimeout timeoutId
|
||||
logger.error err: error, container_id: containerId, "error waiting for container"
|
||||
return callback(error)
|
||||
if timedOut
|
||||
logger.log containerId: containerId, "docker container timed out"
|
||||
error = DockerRunner.ERR_TIMED_OUT
|
||||
error.timedout = true
|
||||
callback error
|
||||
else
|
||||
clearTimeout timeoutId
|
||||
logger.log container_id: containerId, exitCode: res.StatusCode, "docker container returned"
|
||||
callback null, res.StatusCode
|
||||
|
||||
  destroyContainer: (containerName, containerId, shouldForce, callback = (error) ->) ->
    # We want the containerName for the lock and, ideally, the
    # containerId to delete. There is a bug in the docker.io module
    # where if you delete by name and there is an error, it throws an
    # async exception, but if you delete by id it just does a normal
    # error callback. We fall back to deleting by name if no id is
    # supplied.
    LockManager.runWithLock containerName, (releaseLock) ->
      DockerRunner._destroyContainer containerId or containerName, shouldForce, releaseLock
    , callback

  _destroyContainer: (containerId, shouldForce, callback = (error) ->) ->
    logger.log container_id: containerId, "destroying docker container"
    container = dockerode.getContainer(containerId)
    container.remove {force: shouldForce == true}, (error) ->
      if error? and error?.statusCode == 404
        logger.warn err: error, container_id: containerId, "container not found, continuing"
        error = null
      if error?
        logger.error err: error, container_id: containerId, "error destroying container"
      else
        logger.log container_id: containerId, "destroyed container"
      callback(error)

  # handle expiry of docker containers

  MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge or oneHour = 60 * 60 * 1000

  examineOldContainer: (container, callback = (error, name, id, ttl)->) ->
    name = container.Name or container.Names?[0]
    created = container.Created * 1000 # creation time is returned in seconds
    now = Date.now()
    age = now - created
    maxAge = DockerRunner.MAX_CONTAINER_AGE
    ttl = maxAge - age
    logger.log {containerName: name, created: created, now: now, age: age, maxAge: maxAge, ttl: ttl}, "checking whether to destroy container"
    callback(null, name, container.Id, ttl)

  destroyOldContainers: (callback = (error) ->) ->
    dockerode.listContainers all: true, (error, containers) ->
      return callback(error) if error?
      jobs = []
      for container in containers or []
        do (container) ->
          DockerRunner.examineOldContainer container, (err, name, id, ttl) ->
            if name.slice(0, 9) == '/project-' && ttl <= 0
              jobs.push (cb) ->
                DockerRunner.destroyContainer name, id, false, () -> cb()
      # Ignore errors because some containers get stuck but
      # will be destroyed next time
      async.series jobs, callback

  startContainerMonitor: () ->
    logger.log {maxAge: DockerRunner.MAX_CONTAINER_AGE}, "starting container expiry"
    # randomise the start time
    randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
    setTimeout () ->
      setInterval () ->
        DockerRunner.destroyOldContainers()
      , oneHour = 60 * 60 * 1000
    , randomDelay

DockerRunner.startContainerMonitor()
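A worked example of the ttl arithmetic in `examineOldContainer`, using illustrative values:

oneHour = 60 * 60 * 1000
now = Date.now()
created = now - 90 * 60 * 1000     # container started 90 minutes ago
ttl = oneHour - (now - created)    # maxAge - age = -30 minutes
console.log "expired?", ttl <= 0   # true; destroyed if the name starts with '/project-'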
@@ -2,30 +2,30 @@ Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
-CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")
+CommandRunner = require "./CommandRunner"

ProcessTable = {} # table of currently running jobs (pids or docker container names)

module.exports = LatexRunner =
  runLatex: (project_id, options, callback = (error) ->) ->
-    {directory, mainFile, compiler, timeout, image, environment} = options
+    {directory, mainFile, compiler, timeout, image, environment, flags} = options
    compiler ||= "pdflatex"
    timeout ||= 60000 # milliseconds

-    logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, "starting compile"
+    logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, flags:flags, "starting compile"

    # We want to run latexmk on the tex file which we will automatically
    # generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")

    if compiler == "pdflatex"
-      command = LatexRunner._pdflatexCommand mainFile
+      command = LatexRunner._pdflatexCommand mainFile, flags
    else if compiler == "latex"
-      command = LatexRunner._latexCommand mainFile
+      command = LatexRunner._latexCommand mainFile, flags
    else if compiler == "xelatex"
-      command = LatexRunner._xelatexCommand mainFile
+      command = LatexRunner._xelatexCommand mainFile, flags
    else if compiler == "lualatex"
-      command = LatexRunner._lualatexCommand mainFile
+      command = LatexRunner._lualatexCommand mainFile, flags
    else
      return callback new Error("unknown compiler: #{compiler}")

@@ -63,31 +63,32 @@ module.exports = LatexRunner =
    else
      CommandRunner.kill ProcessTable[id], callback

-  _latexmkBaseCommand: (Settings?.clsi?.latexmkCommandPrefix || []).concat([
-    "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR",
-    "-synctex=1","-interaction=batchmode"
-  ])
+  _latexmkBaseCommand: (flags) ->
+    args = ["latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR", "-synctex=1","-interaction=batchmode"]
+    if flags
+      args = args.concat(flags)
+    (Settings?.clsi?.latexmkCommandPrefix || []).concat(args)

-  _pdflatexCommand: (mainFile) ->
-    LatexRunner._latexmkBaseCommand.concat [
+  _pdflatexCommand: (mainFile, flags) ->
+    LatexRunner._latexmkBaseCommand(flags).concat [
      "-pdf",
      Path.join("$COMPILE_DIR", mainFile)
    ]

-  _latexCommand: (mainFile) ->
-    LatexRunner._latexmkBaseCommand.concat [
+  _latexCommand: (mainFile, flags) ->
+    LatexRunner._latexmkBaseCommand(flags).concat [
      "-pdfdvi",
      Path.join("$COMPILE_DIR", mainFile)
    ]

-  _xelatexCommand: (mainFile) ->
-    LatexRunner._latexmkBaseCommand.concat [
+  _xelatexCommand: (mainFile, flags) ->
+    LatexRunner._latexmkBaseCommand(flags).concat [
      "-xelatex",
      Path.join("$COMPILE_DIR", mainFile)
    ]

-  _lualatexCommand: (mainFile) ->
-    LatexRunner._latexmkBaseCommand.concat [
+  _lualatexCommand: (mainFile, flags) ->
+    LatexRunner._latexmkBaseCommand(flags).concat [
      "-lualatex",
      Path.join("$COMPILE_DIR", mainFile)
    ]

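For reference, the new `flags` threading splices request-supplied latexmk switches in after the fixed arguments. With an empty `latexmkCommandPrefix` setting, the expansion looks like this (flag values are examples only):

command = LatexRunner._pdflatexCommand "main.tex", ["-shell-escape", "-file-line-error"]
# => [ "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR",
#      "-outdir=$COMPILE_DIR", "-synctex=1", "-interaction=batchmode",
#      "-shell-escape", "-file-line-error", "-pdf", "$COMPILE_DIR/main.tex" ]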
48  app/coffee/LocalCommandRunner.coffee  Normal file
@@ -0,0 +1,48 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

logger.info "using standard command runner"

module.exports = CommandRunner =
  run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
    command = (arg.toString().replace('$COMPILE_DIR', directory) for arg in command)
    logger.log project_id: project_id, command: command, directory: directory, "running command"
    logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

    # merge environment settings
    env = {}
    env[key] = value for key, value of process.env
    env[key] = value for key, value of environment

    # run command as detached process so it has its own process group (which can be killed if needed)
    proc = spawn command[0], command.slice(1), cwd: directory, env: env

    stdout = ""
    proc.stdout.on "data", (data)->
      stdout += data

    proc.on "error", (err)->
      logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
      callback(err)

    proc.on "close", (code, signal) ->
      logger.info code:code, signal:signal, project_id:project_id, "command exited"
      if signal is 'SIGTERM' # signal from kill method below
        err = new Error("terminated")
        err.terminated = true
        return callback(err)
      else if code is 1 # exit status from chktex
        err = new Error("exited")
        err.code = code
        return callback(err)
      else
        callback(null, {"stdout": stdout})

    return proc.pid # return process id to allow job to be killed if necessary

  kill: (pid, callback = (error) ->) ->
    try
      process.kill -pid # kill all processes in group
    catch err
      return callback(err)
    callback()
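Note that `kill` signals the process group (`-pid`), which only works if the spawned command is the leader of its own group; on Node that normally requires `detached: true` in the `spawn` options, which the comment above implies but the options object shown here omits. A minimal sketch of the presumably intended setup (not verbatim from this diff; command and paths are illustrative):

child_process = require "child_process"

# detached: true => the child becomes its own process-group leader, so a
# negative pid signals the whole group (the command and anything it spawned)
proc = child_process.spawn "latexmk", ["-pdf", "main.tex"],
  cwd: "/tmp/compile", detached: true

process.kill -proc.pid, "SIGTERM"   # terminates the entire group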
@@ -2,7 +2,8 @@ Settings = require('settings-sharelatex')
logger = require "logger-sharelatex"
Lockfile = require('lockfile') # from https://github.com/npm/lockfile
Errors = require "./Errors"

fs = require("fs")
Path = require("path")
module.exports = LockManager =
  LOCK_TEST_INTERVAL: 1000 # 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000 # 15s maximum time to spend trying to get the lock
@@ -14,8 +15,15 @@ module.exports = LockManager =
      pollPeriod: @LOCK_TEST_INTERVAL
      stale: @LOCK_STALE
    Lockfile.lock path, lockOpts, (error) ->
-      return callback new Errors.AlreadyCompilingError("compile in progress") if error?.code is 'EEXIST'
-      return callback(error) if error?
+      if error?.code is 'EEXIST'
+        return callback new Errors.AlreadyCompilingError("compile in progress")
+      else if error?
+        fs.lstat path, (statLockErr, statLock)->
+          fs.lstat Path.dirname(path), (statDirErr, statDir)->
+            fs.readdir Path.dirname(path), (readdirErr, readdirDir)->
+              logger.err error:error, path:path, statLock:statLock, statLockErr:statLockErr, statDir:statDir, statDirErr: statDirErr, readdirErr:readdirErr, readdirDir:readdirDir, "unable to get lock"
+              return callback(error)
      else
        runner (error1, args...) ->
          Lockfile.unlock path, (error2) ->
            error = error1 or error2

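For context, the calling convention for `runWithLock` is the one used by `DockerRunner.destroyContainer` earlier in this diff: the runner receives a `releaseLock` function that both unlocks and forwards results to the final callback. A usage sketch (lock name and `doTheWork` are hypothetical):

LockManager.runWithLock "project-1234", (releaseLock) ->
  doTheWork (error, result) ->
    releaseLock(error, result)   # always release; results pass through to callback
, (error, result) ->
  if error instanceof Errors.AlreadyCompilingError
    console.log "another compile already holds the lock"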
@@ -10,8 +10,6 @@ module.exports = OutputFileFinder =
    for resource in resources
      incomingResources[resource.path] = true

-    logger.log directory: directory, "getting output files"

    OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
      if error?
        logger.err err:error, "error finding all output files"

@@ -1,6 +1,7 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
+dbQueue = require "./DbQueue"
async = require "async"
logger = require "logger-sharelatex"
oneDay = 24 * 60 * 60 * 1000
@@ -11,14 +12,17 @@ module.exports = ProjectPersistenceManager =
  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5

  markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
+    job = (cb)->
      db.Project.findOrCreate(where: {project_id: project_id})
        .spread(
          (project, created) ->
            project.updateAttributes(lastAccessed: new Date())
-              .then(() -> callback())
-              .error callback
+              .then(() -> cb())
+              .error cb
        )
-        .error callback
+        .error cb
+    dbQueue.queue.push(job, callback)

  clearExpiredProjects: (callback = (error) ->) ->
    ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
@@ -47,20 +51,34 @@ module.exports = ProjectPersistenceManager =
  clearProjectFromCache: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "clearing project from cache"
    UrlCache.clearProject project_id, (error) ->
-      return callback(error) if error?
+      if error?
+        logger.err error:error, project_id: project_id, "error clearing project from cache"
+        return callback(error)
      ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
-        return callback(error) if error?
-        callback()
+        if error?
+          logger.err error:error, project_id:project_id, "error clearing project from database"
+        callback(error)

  _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
    logger.log project_id:project_id, "clearing project from database"
+    job = (cb)->
      db.Project.destroy(where: {project_id: project_id})
-        .then(() -> callback())
-        .error callback
+        .then(() -> cb())
+        .error cb
+    dbQueue.queue.push(job, callback)


  _findExpiredProjectIds: (callback = (error, project_ids) ->) ->
-    db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
+    job = (cb)->
+      keepProjectsFrom = new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)
+      q = {}
+      q[db.op.lt] = keepProjectsFrom
+      db.Project.findAll(where:{lastAccessed:q})
        .then((projects) ->
-          callback null, projects.map((project) -> project.project_id)
-        ).error callback
+          cb null, projects.map((project) -> project.project_id)
+        ).error cb

+    dbQueue.queue.push(job, callback)


logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"

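The recurring `job = (cb) -> ...; dbQueue.queue.push(job, callback)` pattern in this diff serialises SQLite access through a shared queue. DbQueue itself is not shown in this excerpt; a plausible minimal implementation, assuming it wraps `async.queue` with the `parallelSqlQueryLimit` setting introduced further down (this is an assumption, not confirmed by the diff):

# Hypothetical DbQueue sketch -- NOT shown in this diff.
async = require "async"
Settings = require "settings-sharelatex"

module.exports = DbQueue =
  # each pushed job is a (cb) -> function; the worker just invokes it,
  # limiting how many SQL queries run concurrently
  queue: async.queue ((task, cb) -> task(cb)), Settings.parallelSqlQueryLimit or 1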
@@ -1,6 +1,8 @@
settings = require("settings-sharelatex")

module.exports = RequestParser =
  VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
-  MAX_TIMEOUT: 300
+  MAX_TIMEOUT: 600

  parse: (body, callback = (error, data) ->) ->
    response = {}
@@ -31,6 +33,10 @@ module.exports = RequestParser =
    response.check = @_parseAttribute "check",
      compile.options.check,
      type: "string"
+    response.flags = @_parseAttribute "flags",
+      compile.options.flags,
+      default: [],
+      type: "object"

    # The syncType specifies whether the request contains all
    # resources (full) or only those resources to be updated

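A compile request exercising the new `flags` option might look like this (a sketch as a CoffeeScript object literal; field values are illustrative):

exampleRequest =
  compile:
    options:
      compiler: "pdflatex"
      timeout: 600                 # now capped by MAX_TIMEOUT = 600
      flags: ["-shell-escape"]     # must be an array; defaults to []
    resources: [
      path: "main.tex"
      content: "\\documentclass{article}\\begin{document}ok\\end{document}"
    ]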
@@ -78,7 +78,15 @@ module.exports = ResourceWriter =
    should_delete = true
    if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
      should_delete = false
-    if path.match(/^output-.*/) # Tikz cached figures
+    if path.match(/^output-.*/) # Tikz cached figures (default case)
      should_delete = false
+    if path.match(/\.(pdf|dpth|md5)$/) # Tikz cached figures (by extension)
+      should_delete = false
+    if path.match(/\.(pygtex|pygstyle)$/) or path.match(/(^|\/)_minted-[^\/]+\//) # minted files/directory
+      should_delete = false
+    if path.match(/\.md\.tex$/) or path.match(/(^|\/)_markdown_[^\/]+\//) # markdown files/directory
+      should_delete = false
+    if path.match(/-eps-converted-to\.pdf$/) # Epstopdf generated files
+      should_delete = false
    if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
      should_delete = true
@@ -120,7 +128,11 @@ module.exports = ResourceWriter =
        logger.err err:err, project_id:project_id, path:path, resource_url:resource.url, modified:resource.modified, "error downloading file for resources"
        callback() #try and continue compiling even if http resource can not be downloaded at this time
      else
+        process = require("process")
        fs.writeFile path, resource.content, callback
+        try
+          result = fs.lstatSync(path)
+        catch e

  checkPath: (basePath, resourcePath, callback) ->
    path = Path.normalize(Path.join(basePath, resourcePath))

@@ -4,32 +4,34 @@ ResourceWriter = require "./ResourceWriter"
SafeReader = require "./SafeReader"
logger = require "logger-sharelatex"

-# for \tikzexternalize to work the main file needs to match the
+# for \tikzexternalize or pstool to work the main file needs to match the
# jobname. Since we set the -jobname to output, we have to create a
# copy of the main file as 'output.tex'.

module.exports = TikzManager =

-  checkMainFile: (compileDir, mainFile, resources, callback = (error, usesTikzExternalize) ->) ->
+  checkMainFile: (compileDir, mainFile, resources, callback = (error, needsMainFile) ->) ->
    # if there's already an output.tex file, we don't want to touch it
    for resource in resources
      if resource.path is "output.tex"
        logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
        return callback(null, false)
-    # if there's no output.tex, see if we are using tikz/pgf in the main file
+    # if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
    ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
      return callback(error) if error?
      SafeReader.readFile path, 65536, "utf8", (error, content) ->
        return callback(error) if error?
        usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
-        logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, "checked for tikzexternalize"
-        callback null, usesTikzExternalize
+        usesPsTool = content?.indexOf("{pstool}") >= 0
+        logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, usesPsTool: usesPsTool, "checked for packages needing main file as output.tex"
+        needsMainFile = (usesTikzExternalize || usesPsTool)
+        callback null, needsMainFile

  injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
    ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
      return callback(error) if error?
      fs.readFile path, "utf8", (error, content) ->
        return callback(error) if error?
-        logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex for tikz"
+        logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex as project uses packages which require it"
        # use wx flag to ensure that output file does not already exist
        fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback

@@ -1,4 +1,5 @@
db = require("./db")
+dbQueue = require "./DbQueue"
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
@@ -51,7 +52,6 @@ module.exports = UrlCache =
  _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
    if !lastModified?
      return callback null, true

    UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
      return callback(error) if error?
      if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
@@ -94,32 +94,41 @@ module.exports = UrlCache =
      return callback()

  _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
+    job = (cb)->
      db.UrlCache.find(where: { url: url, project_id: project_id })
-        .then((urlDetails) -> callback null, urlDetails)
-        .error callback
+        .then((urlDetails) -> cb null, urlDetails)
+        .error cb
+    dbQueue.queue.push job, callback

  _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
+    job = (cb)->
      db.UrlCache.findOrCreate(where: {url: url, project_id: project_id})
        .spread(
          (urlDetails, created) ->
            urlDetails.updateAttributes(lastModified: lastModified)
-              .then(() -> callback())
-              .error(callback)
+              .then(() -> cb())
+              .error(cb)
        )
-        .error callback
+        .error cb
+    dbQueue.queue.push(job, callback)

  _clearUrlDetails: (project_id, url, callback = (error) ->) ->
+    job = (cb)->
      db.UrlCache.destroy(where: {url: url, project_id: project_id})
-        .then(() -> callback null)
-        .error callback
+        .then(() -> cb null)
+        .error cb
+    dbQueue.queue.push(job, callback)


  _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
+    job = (cb)->
      db.UrlCache.findAll(where: { project_id: project_id })
        .then(
          (urlEntries) ->
-            callback null, urlEntries.map((entry) -> entry.url)
+            cb null, urlEntries.map((entry) -> entry.url)
        )
-        .error callback
+        .error cb
+    dbQueue.queue.push(job, callback)


@@ -1,6 +1,8 @@
request = require("request").defaults(jar: false)
fs = require("fs")
logger = require "logger-sharelatex"
+settings = require("settings-sharelatex")
+URL = require('url');

oneMinute = 60 * 1000

@@ -11,6 +13,9 @@ module.exports = UrlFetcher =
      _callback(error)
      _callback = () ->

+    if settings.filestoreDomainOveride?
+      p = URL.parse(url).path
+      url = "#{settings.filestoreDomainOveride}#{p}"
    timeoutHandler = setTimeout () ->
      timeoutHandler = null
      logger.error url:url, filePath: filePath, "Timed out downloading file to cache"

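The effect of the new `filestoreDomainOveride` setting (the spelling "Overide" is as it appears in the code): the host is swapped while the path and query survive, since Node's legacy `url.parse(...).path` is pathname plus query string. A small demonstration with illustrative values:

URL = require "url"
url = "http://filestore.internal:3009/project/123/file/456?from=cache"
p = URL.parse(url).path             # "/project/123/file/456?from=cache"
rewritten = "http://localhost:3009#{p}"
console.log rewritten               # host swapped, path and query preserved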
@@ -1,9 +1,12 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")
_ = require("underscore")
+logger = require "logger-sharelatex"

options = _.extend {logging:false}, Settings.mysql.clsi

+logger.log dbPath:Settings.mysql.clsi.storage, "connecting to db"

sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
@@ -11,6 +14,12 @@ sequelize = new Sequelize(
  options
)

+if Settings.mysql.clsi.dialect == "sqlite"
+  logger.log "running PRAGMA journal_mode=WAL;"
+  sequelize.query("PRAGMA journal_mode=WAL;")
+  sequelize.query("PRAGMA synchronous=OFF;")
+  sequelize.query("PRAGMA read_uncommitted = true;")

module.exports =
  UrlCache: sequelize.define("UrlCache", {
    url: Sequelize.STRING
@@ -32,5 +41,15 @@ module.exports =
    ]
  })

-  sync: () -> sequelize.sync()
+  op: Sequelize.Op

+  sync: () ->
+    logger.log dbPath:Settings.mysql.clsi.storage, "syncing db schema"
+    sequelize.sync()
+      .then(->
+        logger.log "db sync complete"
+      ).catch((err)->
+        console.log err, "error syncing"
+      )

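Background on the `op: Sequelize.Op` export: the Sequelize 2 raw-array where clause (`where: ["lastAccessed < ?", date]`) is gone in Sequelize 4, which expects operator symbols instead. CoffeeScript 1.x has no computed object keys, which is why `_findExpiredProjectIds` builds the clause via indexing rather than a literal (a sketch of the equivalence):

q = {}
q[db.op.lt] = keepProjectsFrom                 # { [Op.lt]: date } in JS terms
db.Project.findAll(where: {lastAccessed: q})
# generates SQL roughly like: SELECT ... WHERE "lastAccessed" < '...'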
4  bin/acceptance_test  Normal file
@@ -0,0 +1,4 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"

BIN  bin/synctex  Executable file
Binary file not shown.

9  buildscript.txt  Normal file
@@ -0,0 +1,9 @@
clsi
--language=coffeescript
--node-version=10.15.0
--acceptance-creds=None
--dependencies=mongo,redis
--docker-repos=gcr.io/overleaf-ops
--env-pass-through=TEXLIVE_IMAGE
--build-target=docker
--script-version=1.1.22

39  cloudbuild.yaml  Normal file
@@ -0,0 +1,39 @@
steps:
  - id: texlive
    name: 'gcr.io/overleaf-ops/texlive-full:2017.1'
  - id: build
    name: 'gcr.io/overleaf-ops/cloud-builder'
    args:
      - 'build'
    env:
      - 'BUILD_NUMBER=$SHORT_SHA'
      - 'BRANCH_NAME=$BRANCH_NAME'
    waitFor: ['-']
  - id: test_unit
    name: 'gcr.io/overleaf-ops/cloud-builder'
    args:
      - 'test_unit'
    env:
      - 'DOCKER_COMPOSE_FLAGS=-f docker-compose.ci.yml'
      - 'BUILD_NUMBER=$SHORT_SHA'
      - 'BRANCH_NAME=$BRANCH_NAME'
    waitFor:
      - build
  - id: test_acceptance
    name: 'gcr.io/overleaf-ops/cloud-builder'
    args:
      - 'test_acceptance'
    env:
      - 'DOCKER_COMPOSE_FLAGS=-f docker-compose.ci.yml'
      - 'BUILD_NUMBER=$SHORT_SHA'
      - 'BRANCH_NAME=$BRANCH_NAME'
      - 'TEXLIVE_IMAGE=gcr.io/overleaf-ops/texlive-full:2017.1'
    waitFor:
      - build
      - texlive
images:
  - 'gcr.io/$PROJECT_ID/clsi:${BRANCH_NAME}-${SHORT_SHA}'
timeout: 1800s
options:
  diskSizeGb: 200
  machineType: 'N1_HIGHCPU_8'
@@ -7,9 +7,15 @@ module.exports =
  clsi:
    database: "clsi"
    username: "clsi"
    password: null
    dialect: "sqlite"
-    storage: Path.resolve(__dirname + "/../db.sqlite")
+    storage: process.env["SQLITE_PATH"] or Path.resolve(__dirname + "/../db.sqlite")
+    pool:
+      max: 1
+      min: 1
+    retry:
+      max: 10

+  compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] or "7mb"

  path:
    compilesDir: Path.resolve(__dirname + "/../compiles")
@@ -21,18 +27,28 @@ module.exports =
  port: 3013
  host: process.env["LISTEN_ADDRESS"] or "localhost"

+  load_balancer_agent:
+    report_load:true
+    load_port: 3048
+    local_port: 3049
  apis:
    clsi:
-      url: "http://localhost:3013"
+      url: "http://#{process.env['CLSI_HOST'] or 'localhost'}:3013"

-  smokeTest: false
+  smokeTest: process.env["SMOKE_TEST"] or false
  project_cache_length_ms: 1000 * 60 * 60 * 24
-  parallelFileDownloads:1
+  parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] or 1
+  parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] or 1
+  filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"]
+  texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"]
+  sentry:
+    dsn: process.env['SENTRY_DSN']

-if process.env["COMMAND_RUNNER"]
+if process.env["DOCKER_RUNNER"]
  module.exports.clsi =
-    commandRunner: process.env["COMMAND_RUNNER"]
+    dockerRunner: process.env["DOCKER_RUNNER"] == "true"
    docker:
      image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
      env:
@@ -41,4 +57,15 @@ if process.env["COMMAND_RUNNER"]
      user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
    checkProjectsIntervalMs: 10 * 60 * 1000

+  try
+    seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json")
+    module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path)))
+  catch error
+    console.log error, "could not load seccom profile from #{seccomp_profile_path}"

+  module.exports.path.synctexBaseDir = -> "/compile"

+  module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]

+  module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]

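Taken together with the LatexRunner change to a plain `require "./CommandRunner"`, the new `dockerRunner` boolean suggests that CommandRunner.coffee (not shown in this excerpt) now dispatches between the docker and local runners itself. A sketch of the assumed dispatch, hypothetical rather than quoted from the repository:

# Hypothetical CommandRunner.coffee dispatch -- NOT shown in this diff.
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"

if Settings.clsi?.dockerRunner
  commandRunnerPath = "./DockerRunner"        # sandboxed compiles
else
  commandRunnerPath = "./LocalCommandRunner"  # the runner added above
logger.info commandRunnerPath: commandRunnerPath, "selecting command runner"
module.exports = require(commandRunnerPath)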
5  debug  Executable file
@@ -0,0 +1,5 @@
#!/bin/bash
echo "hello world"
sleep 3
echo "awake"
/opt/synctex pdf /compile/output.pdf 1 100 200

32  docker-compose-config.yml  Normal file
@@ -0,0 +1,32 @@
version: "2"

services:
  dev:
    environment:
      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
      TEXLIVE_IMAGE_USER: "tex"
      SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
      DOCKER_RUNNER: "true"
      COMPILES_HOST_DIR: $PWD/compiles
      SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./compiles:/app/compiles
      - ./cache:/app/cache
      - ./bin/synctex:/app/bin/synctex

  ci:
    environment:
      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
      TEXLIVE_IMAGE_USER: "tex"
      SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
      DOCKER_RUNNER: "true"
      COMPILES_HOST_DIR: $PWD/compiles
      SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
      SQLITE_PATH: /app/compiles/db.sqlite
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:rw
      - ./compiles:/app/compiles
      - ./cache:/app/cache
      - ./bin/synctex:/app/bin/synctex

49  docker-compose.ci.yml  Normal file
@@ -0,0 +1,49 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22

version: "2"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test

  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    extends:
      file: docker-compose-config.yml
      service: ci
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      TEXLIVE_IMAGE:
    depends_on:
      - mongo
      - redis
    command: npm run test:acceptance:_run

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root

  redis:
    image: redis

  mongo:
    image: mongo:3.4

56  docker-compose.yml  Normal file
@@ -0,0 +1,56 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
# Version: 1.1.22

version: "2"

services:
  test_unit:
    build: .
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
    command: npm run test:unit

  test_acceptance:
    build: .
    volumes:
      - .:/app
    working_dir: /app
    extends:
      file: docker-compose-config.yml
      service: dev
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ERROR
      NODE_ENV: test
    depends_on:
      - mongo
      - redis
    command: npm run test:acceptance

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root

  redis:
    image: redis

  mongo:
    image: mongo:3.4
6  entrypoint.sh  Normal file
@@ -0,0 +1,6 @@
#!/bin/bash

set -o pipefail

/app/inner-entrypoint.sh "$@" 2>&1 | ts

27  inner-entrypoint.sh  Executable file
@@ -0,0 +1,27 @@
#!/bin/sh

set -x

date
echo "Changing permissions of /var/run/docker.sock for sibling containers"
ls -al /var/run/docker.sock
docker --version
cat /etc/passwd

DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
usermod -aG dockeronhost node

mkdir -p /app/cache
chown -R node:node /app/cache

mkdir -p /app/compiles
chown -R node:node /app/compiles

chown -R node:node /app/bin/synctex
mkdir -p /app/test/acceptance/fixtures/tmp/
chown -R node:node /app

chown -R node:node /app/bin

exec runuser -u node -- "$@"

4  install_deps.sh  Executable file
@@ -0,0 +1,4 @@
#!/bin/sh
wget -qO- https://get.docker.com/ | sh
apt-get install poppler-utils vim ghostscript --yes
npm rebuild
41  kube.yaml  Normal file
@@ -0,0 +1,41 @@
apiVersion: v1
kind: Service
metadata:
  name: clsi
  namespace: default
spec:
  type: LoadBalancer
  ports:
    - port: 80
      protocol: TCP
      targetPort: 80
  selector:
    run: clsi
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: clsi
  namespace: default
spec:
  replicas: 2
  template:
    metadata:
      labels:
        run: clsi
    spec:
      containers:
        - name: clsi
          image: gcr.io/henry-terraform-admin/clsi
          imagePullPolicy: Always
          readinessProbe:
            httpGet:
              path: status
              port: 80
            periodSeconds: 5
            initialDelaySeconds: 0
            failureThreshold: 3
            successThreshold: 1

19  nodemon.json  Normal file
@@ -0,0 +1,19 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },

  "watch": [
    "app/coffee/",
    "app.coffee",
    "config/"
  ],
  "ext": "coffee"

}

2948  npm-shrinkwrap.json  generated  Normal file
File diff suppressed because it is too large.

46  package.json
@@ -7,46 +7,48 @@
    "url": "https://github.com/sharelatex/clsi-sharelatex.git"
  },
  "scripts": {
-    "compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee",
-    "start": "npm run compile:app && node app.js"
+    "compile:app": "([ -e app/coffee ] && coffee -m $COFFEE_OPTIONS -o app/js -c app/coffee || echo 'No CoffeeScript folder to compile') && ( [ -e app.coffee ] && coffee -m $COFFEE_OPTIONS -c app.coffee || echo 'No CoffeeScript app to compile')",
+    "start": "npm run compile:app && node $NODE_APP_OPTIONS app.js",
+    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 30000 --exit $@ test/acceptance/js",
+    "test:acceptance": "npm run compile:app && npm run compile:acceptance_tests && npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
+    "test:unit:_run": "mocha --recursive --reporter spec --exit $@ test/unit/js",
+    "test:unit": "npm run compile:app && npm run compile:unit_tests && npm run test:unit:_run -- --grep=$MOCHA_GREP",
+    "compile:unit_tests": "[ ! -e test/unit/coffee ] && echo 'No unit tests to compile' || coffee -o test/unit/js -c test/unit/coffee",
+    "compile:acceptance_tests": "[ ! -e test/acceptance/coffee ] && echo 'No acceptance tests to compile' || coffee -o test/acceptance/js -c test/acceptance/coffee",
+    "compile:all": "npm run compile:app && npm run compile:unit_tests && npm run compile:acceptance_tests && npm run compile:smoke_tests",
+    "nodemon": "nodemon --config nodemon.json",
+    "compile:smoke_tests": "[ ! -e test/smoke/coffee ] && echo 'No smoke tests to compile' || coffee -o test/smoke/js -c test/smoke/coffee"
  },
  "author": "James Allen <james@sharelatex.com>",
  "dependencies": {
    "async": "0.2.9",
    "body-parser": "^1.2.0",
    "dockerode": "^2.5.3",
    "express": "^4.2.0",
    "fs-extra": "^0.16.3",
    "grunt-mkdir": "^1.0.0",
    "heapdump": "^0.3.5",
    "lockfile": "^1.0.3",
-    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4",
+    "logger-sharelatex": "^1.7.0",
    "lynx": "0.0.11",
-    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0",
+    "metrics-sharelatex": "^2.2.0",
    "mkdirp": "0.3.5",
    "mysql": "2.6.2",
    "request": "^2.21.0",
-    "sequelize": "^2.1.3",
-    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
+    "sequelize": "^4.38.0",
+    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
    "smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0",
-    "sqlite3": "~3.1.8",
+    "sqlite3": "^4.0.6",
    "underscore": "^1.8.2",
-    "v8-profiler": "^5.2.4",
+    "v8-profiler-node8": "^6.0.1",
    "wrench": "~1.5.4"
  },
  "devDependencies": {
-    "mocha": "1.10.0",
-    "coffee-script": "1.6.0",
-    "chai": "~1.8.1",
-    "sinon": "~1.7.3",
-    "grunt": "~0.4.2",
-    "grunt-contrib-coffee": "~0.7.0",
-    "grunt-contrib-clean": "~0.5.0",
-    "grunt-shell": "~0.6.1",
-    "grunt-mocha-test": "~0.8.1",
-    "sandboxed-module": "~0.3.0",
-    "timekeeper": "0.0.4",
-    "grunt-execute": "^0.1.5",
-    "bunyan": "^0.22.1",
-    "grunt-bunyan": "^0.5.0"
+    "chai": "~1.8.1",
+    "coffeescript": "1.6.0",
+    "mocha": "^4.0.1",
+    "sandboxed-module": "~0.3.0",
+    "sinon": "~1.7.3",
+    "timekeeper": "0.0.4"
  }
}

3  patch-texlive-dockerfile  Normal file
@@ -0,0 +1,3 @@
FROM quay.io/sharelatex/texlive-full:2017.1

# RUN usermod -u 1001 tex
836  seccomp/clsi-profile.json  Normal file
@@ -0,0 +1,836 @@
{
  "defaultAction": "SCMP_ACT_ERRNO",
  "architectures": ["SCMP_ARCH_X86_64", "SCMP_ARCH_X86", "SCMP_ARCH_X32"],
  "syscalls": [
    {"name": "access", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "arch_prctl", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "brk", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "chdir", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "chmod", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "clock_getres", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "clock_gettime", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "clock_nanosleep", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "clone", "action": "SCMP_ACT_ALLOW", "args": [
      {"index": 0, "value": 2080505856, "valueTwo": 0, "op": "SCMP_CMP_MASKED_EQ"}
    ]},
    {"name": "close", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "copy_file_range", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "creat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "dup", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "dup2", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "dup3", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "execve", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "execveat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "exit", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "exit_group", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "faccessat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fadvise64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fadvise64_64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fallocate", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fchdir", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fchmod", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fchmodat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fcntl", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fcntl64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fdatasync", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fork", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fstat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fstat64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fstatat64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fstatfs", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fstatfs64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fsync", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "ftruncate", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "ftruncate64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "futex", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "futimesat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getcpu", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getcwd", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getdents", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getdents64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getegid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getegid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "geteuid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "geteuid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getgid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getgid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getgroups", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getgroups32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getpgid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getpgrp", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getpid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getppid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getpriority", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getresgid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getresgid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getresuid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getresuid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getrlimit", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "get_robust_list", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getrusage", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getsid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "gettid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getuid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "getuid32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "ioctl", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "kill", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "_llseek", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "lseek", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "lstat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "lstat64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "madvise", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mkdir", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mkdirat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mmap", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mmap2", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mprotect", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "mremap", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "munmap", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "newfstatat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "open", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "openat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pause", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pipe", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pipe2", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "prctl", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pread64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "preadv", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "prlimit64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pwrite64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pwritev", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "read", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "readlink", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "readlinkat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "readv", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rename", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "renameat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "renameat2", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "restart_syscall", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rmdir", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigaction", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigpending", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigprocmask", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigqueueinfo", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigreturn", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigsuspend", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_sigtimedwait", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "rt_tgsigqueueinfo", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_getaffinity", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_getparam", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_get_priority_max", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_get_priority_min", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_getscheduler", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_rr_get_interval", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sched_yield", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sendfile", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sendfile64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "setgroups", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "setgroups32", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "set_robust_list", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "set_tid_address", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sigaltstack", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "stat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "stat64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "statfs", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "statfs64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sync", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sync_file_range", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "syncfs", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "sysinfo", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "tgkill", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "timer_create", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "timer_delete", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "timer_getoverrun", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "timer_gettime", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "timer_settime", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "times", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "tkill", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "truncate", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "truncate64", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "umask", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "uname", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "unlink", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "unlinkat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "utime", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "utimensat", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "utimes", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "vfork", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "vhangup", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "wait4", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "waitid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "write", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "writev", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "pread", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "setgid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "setuid", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "capget", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "capset", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "fchown", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "gettimeofday", "action": "SCMP_ACT_ALLOW", "args": []},
    {"name": "epoll_pwait", "action": "SCMP_ACT_ALLOW", "args": []}
  ]
}
34  synctex.profile  Normal file
@@ -0,0 +1,34 @@
include /etc/firejail/disable-common.inc
include /etc/firejail/disable-devel.inc
# include /etc/firejail/disable-mgmt.inc ## removed in 0.9.40
# include /etc/firejail/disable-secret.inc ## removed in 0.9.40

read-only /bin
blacklist /boot
blacklist /dev
read-only /etc
blacklist /home # blacklisted for synctex
read-only /lib
read-only /lib64
blacklist /media
blacklist /mnt
blacklist /opt
blacklist /root
read-only /run
blacklist /sbin
blacklist /selinux
blacklist /src
blacklist /sys
read-only /usr

caps.drop all
noroot
nogroups
net none
private-tmp
private-dev
shell none
seccomp
nonewprivs

@@ -1,9 +1,10 @@
|
||||
Client = require "./helpers/Client"
|
||||
request = require "request"
|
||||
require("chai").should()
|
||||
ClsiApp = require "./helpers/ClsiApp"
|
||||
|
||||
describe "Broken LaTeX file", ->
|
||||
before ->
|
||||
before (done)->
|
||||
@broken_request =
|
||||
resources: [
|
||||
path: "main.tex"
|
||||
@@ -24,6 +25,7 @@ describe "Broken LaTeX file", ->
|
||||
\\end{document}
|
||||
'''
|
||||
]
|
||||
ClsiApp.ensureRunning done
|
||||
|
||||
describe "on first run", ->
|
||||
before (done) ->
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
Client = require "./helpers/Client"
|
||||
request = require "request"
|
||||
require("chai").should()
|
||||
ClsiApp = require "./helpers/ClsiApp"
|
||||
|
||||
describe "Deleting Old Files", ->
|
||||
before ->
|
||||
before (done)->
|
||||
@request =
|
||||
resources: [
|
||||
path: "main.tex"
|
||||
@@ -14,6 +15,7 @@ describe "Deleting Old Files", ->
|
||||
\\end{document}
|
||||
'''
|
||||
]
|
||||
ClsiApp.ensureRunning done
|
||||
|
||||
describe "on first run", ->
|
||||
before (done) ->
|
||||
|
||||
@@ -3,15 +3,25 @@ request = require "request"
 require("chai").should()
 fs = require "fs"
 ChildProcess = require "child_process"

-fixturePath = (path) -> __dirname + "/../fixtures/" + path

+ClsiApp = require "./helpers/ClsiApp"
+logger = require("logger-sharelatex")
+Path = require("path")
+fixturePath = (path) -> Path.normalize(__dirname + "/../fixtures/" + path)
+process = require "process"
+console.log process.pid, process.ppid, process.getuid(),process.getgroups(), "PID"
 try
+	console.log "creating tmp directory", fixturePath("tmp")
 	fs.mkdirSync(fixturePath("tmp"))
-catch e
+catch err
+	console.log err, fixturePath("tmp"), "unable to create fixture tmp path"

 MOCHA_LATEX_TIMEOUT = 60 * 1000

 convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
-	convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
+	command = "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
+	console.log "COMMAND"
+	console.log command
+	convert = ChildProcess.exec command
 	stdout = ""
 	convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
 	convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
@@ -25,7 +35,10 @@ compare = (originalPath, generatedPath, callback = (error, same) ->) ->
 	proc.stderr.on "data", (chunk) -> stderr += chunk
 	proc.on "exit", () ->
 		if stderr.trim() == "0 (0)"
-			fs.unlink diff_file # remove output diff if test matches expected image
+			# remove output diff if test matches expected image
+			fs.unlink diff_file, (err) ->
+				if err
+					throw err
 			callback null, true
 		else
 			console.log "compare result", stderr
@@ -40,7 +53,6 @@ checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
 	if stdout.match(/Optimized:\s+yes/)
 		callback null, true
 	else
 		console.log "pdfinfo result", stdout
 		callback null, false

 compareMultiplePages = (project_id, callback = (error) ->) ->
@@ -57,6 +69,8 @@ compareMultiplePages = (project_id, callback = (error) ->) ->
 	compareNext 0, callback

 comparePdf = (project_id, example_dir, callback = (error) ->) ->
+	console.log "CONVERT"
+	console.log "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png"
 	convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
 		throw error if error?
 		convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
@@ -75,6 +89,7 @@ comparePdf = (project_id, example_dir, callback = (error) ->) ->
 downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
 	writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
 	request.get(url).pipe(writeStream)
+	console.log("writing file out", fixturePath("tmp/#{project_id}.pdf"))
 	writeStream.on "close", () =>
 		checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
 			throw error if error?
@@ -85,7 +100,9 @@ Client.runServer(4242, fixturePath("examples"))

 describe "Example Documents", ->
 	before (done) ->
-		ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> done()
+		ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () ->
+			ClsiApp.ensureRunning done


 	for example_dir in fs.readdirSync fixturePath("examples")
 		do (example_dir) ->
@@ -94,6 +111,7 @@ describe "Example Documents", ->
 			@project_id = Client.randomId() + "_" + example_dir

 			it "should generate the correct pdf", (done) ->
+				this.timeout(MOCHA_LATEX_TIMEOUT)
 				Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
 					if error || body?.compile?.status is "failure"
 						console.log "DEBUG: error", error, "body", JSON.stringify(body)
@@ -101,6 +119,7 @@ describe "Example Documents", ->
 				downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

 			it "should generate the correct pdf on the second run as well", (done) ->
+				this.timeout(MOCHA_LATEX_TIMEOUT)
 				Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
 					if error || body?.compile?.status is "failure"
 						console.log "DEBUG: error", error, "body", JSON.stringify(body)
@@ -1,6 +1,7 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
+ClsiApp = require "./helpers/ClsiApp"

 describe "Simple LaTeX file", ->
 	before (done) ->
@@ -15,6 +16,7 @@ describe "Simple LaTeX file", ->
 			\\end{document}
 		'''
 		]
+		ClsiApp.ensureRunning =>
 			Client.compile @project_id, @request, (@error, @res, @body) => done()

 	it "should return the PDF", ->
@@ -2,20 +2,24 @@ Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
 expect = require("chai").expect
+ClsiApp = require "./helpers/ClsiApp"
+crypto = require("crypto")

 describe "Syncing", ->
 	before (done) ->
-		@request =
-			resources: [
-				path: "main.tex"
-				content: '''
+		content = '''
 			\\documentclass{article}
 			\\begin{document}
 			Hello world
 			\\end{document}
 		'''
+		@request =
+			resources: [
+				path: "main.tex"
+				content: content
 			]
 		@project_id = Client.randomId()
+		ClsiApp.ensureRunning =>
 			Client.compile @project_id, @request, (@error, @res, @body) => done()

 	describe "from code to pdf", ->
@@ -29,7 +33,7 @@ describe "Syncing", ->

 	describe "from pdf to code", ->
 		it "should return the correct location", (done) ->
-			Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) ->
+			Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) =>
 				throw error if error?
 				expect(codePositions).to.deep.equal(
 					code: [ { file: 'main.tex', line: 3, column: -1 } ]
@@ -1,23 +1,26 @@
 Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
+ClsiApp = require "./helpers/ClsiApp"


 describe "Timed out compile", ->
 	before (done) ->
 		@request =
 			options:
-				timeout: 1 #seconds
+				timeout: 10 #seconds
 			resources: [
 				path: "main.tex"
 				content: '''
 					\\documentclass{article}
 					\\begin{document}
 					Hello world
+					\\input{|"sleep 10"}
 					\\def\\x{Hello!\\par\\x}
 					\\x
 					\\end{document}
 				'''
 			]
 		@project_id = Client.randomId()
+		ClsiApp.ensureRunning =>
 			Client.compile @project_id, @request, (@error, @res, @body) => done()

 	it "should return a timeout error", ->
@@ -2,6 +2,7 @@ Client = require "./helpers/Client"
 request = require "request"
 require("chai").should()
 sinon = require "sinon"
+ClsiApp = require "./helpers/ClsiApp"

 host = "localhost"

@@ -46,6 +47,7 @@ describe "Url Caching", ->
 			}]

 			sinon.spy Server, "getFile"
+			ClsiApp.ensureRunning =>
 				Client.compile @project_id, @request, (@error, @res, @body) => done()

 		afterEach ->
@@ -4,6 +4,7 @@ require("chai").should()
 expect = require("chai").expect
 path = require("path")
 fs = require("fs")
+ClsiApp = require "./helpers/ClsiApp"

 describe "Syncing", ->
 	before (done) ->
@@ -13,6 +14,7 @@ describe "Syncing", ->
 			content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8")
 		]
 		@project_id = Client.randomId()
+		ClsiApp.ensureRunning =>
 			Client.compile @project_id, @request, (@error, @res, @body) => done()

 	describe "wordcount file", ->
@@ -30,6 +30,7 @@ module.exports = Client =
 		express = require("express")
 		app = express()
 		app.use express.static(directory)
+		console.log("starting test server on", port, host)
 		app.listen(port, host).on "error", (error) ->
 			console.error "error starting server:", error.message
 			process.exit(1)
test/acceptance/coffee/helpers/ClsiApp.coffee (new file, 24 lines)
@@ -0,0 +1,24 @@
app = require('../../../../app')
require("logger-sharelatex").logger.level("info")
logger = require("logger-sharelatex")
Settings = require("settings-sharelatex")

module.exports =
	running: false
	initing: false
	callbacks: []
	ensureRunning: (callback = (error) ->) ->
		if @running
			return callback()
		else if @initing
			@callbacks.push callback
		else
			@initing = true
			@callbacks.push callback
			app.listen Settings.internal?.clsi?.port, "localhost", (error) =>
				throw error if error?
				@running = true
				logger.log("clsi running in dev mode")

				for callback in @callbacks
					callback()
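ensureRunning memoizes startup: the first caller boots the app, concurrent callers queue until the listen callback fires, and later callers return immediately. A usage sketch (the test name is illustrative):

```coffeescript
ClsiApp = require "./helpers/ClsiApp"

describe "Some acceptance test", ->  # illustrative
	before (done) ->
		ClsiApp.ensureRunning done     # boots the CLSI once, reuses it afterwards
```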
@@ -14,7 +14,7 @@ describe "CompileController", ->
 			clsi:
 				url: "http://clsi.example.com"
 		"./ProjectPersistenceManager": @ProjectPersistenceManager = {}
-		"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+		"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:sinon.stub(), warn: sinon.stub()}
 		@Settings.externalUrl = "http://www.example.com"
 		@req = {}
 		@res = {}
@@ -144,7 +144,7 @@ describe "CompileController", ->
 				file: @file
 				line: @line.toString()
 				column: @column.toString()
-			@res.send = sinon.stub()
+			@res.json = sinon.stub()

 			@CompileManager.syncFromCode = sinon.stub().callsArgWith(5, null, @pdfPositions = ["mock-positions"])
 			@CompileController.syncFromCode @req, @res, @next
@@ -155,8 +155,8 @@ describe "CompileController", ->
 				.should.equal true

 		it "should return the positions", ->
-			@res.send
-				.calledWith(JSON.stringify
+			@res.json
+				.calledWith(
 					pdf: @pdfPositions
 				)
 				.should.equal true
@@ -173,7 +173,7 @@ describe "CompileController", ->
 				page: @page.toString()
 				h: @h.toString()
 				v: @v.toString()
-			@res.send = sinon.stub()
+			@res.json = sinon.stub()

 			@CompileManager.syncFromPdf = sinon.stub().callsArgWith(5, null, @codePositions = ["mock-positions"])
 			@CompileController.syncFromPdf @req, @res, @next
@@ -184,8 +184,8 @@ describe "CompileController", ->
 				.should.equal true

 		it "should return the positions", ->
-			@res.send
-				.calledWith(JSON.stringify
+			@res.json
+				.calledWith(
 					code: @codePositions
 				)
 				.should.equal true
@@ -199,7 +199,7 @@ describe "CompileController", ->
 			@req.query =
 				file: @file
 				image: @image = "example.com/image"
-			@res.send = sinon.stub()
+			@res.json = sinon.stub()

 			@CompileManager.wordcount = sinon.stub().callsArgWith(4, null, @texcount = ["mock-texcount"])
 			@CompileController.wordcount @req, @res, @next
@@ -210,8 +210,8 @@ describe "CompileController", ->
 				.should.equal true

 		it "should return the texcount info", ->
-			@res.send
-				.calledWith(JSON.stringify
+			@res.json
+				.calledWith(
 					texcount: @texcount
 				)
 				.should.equal true
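The controller hunks above replace res.send(JSON.stringify(...)) with Express's res.json(...), which serializes and sets the Content-Type header in one call. For reference, in plain Express (not this repo's code):

```coffeescript
express = require "express"
app = express()

app.get "/example", (req, res) ->       # illustrative route
	positions = ["mock-positions"]
	# Old style: res.send JSON.stringify(pdf: positions)
	res.json pdf: positions               # serializes and sets application/json
```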
@@ -13,7 +13,14 @@ describe "CompileManager", ->
 		"./ResourceWriter": @ResourceWriter = {}
 		"./OutputFileFinder": @OutputFileFinder = {}
 		"./OutputCacheManager": @OutputCacheManager = {}
-		"settings-sharelatex": @Settings = { path: compilesDir: "/compiles/dir" }
+		"settings-sharelatex": @Settings =
+			path:
+				compilesDir: "/compiles/dir"
+				synctexBaseDir: -> "/compile"
+			clsi:
+				docker:
+					image: "SOMEIMAGE"
+
 		"logger-sharelatex": @logger = { log: sinon.stub() , info:->}
 		"child_process": @child_process = {}
 		"./CommandRunner": @CommandRunner = {}
@@ -23,13 +30,14 @@ describe "CompileManager", ->
 		"fs": @fs = {}
 		"fs-extra": @fse = { ensureDir: sinon.stub().callsArg(1) }
 		@callback = sinon.stub()

+		@project_id = "project-id-123"
+		@user_id = "1234"
 	describe "doCompileWithLock", ->
 		beforeEach ->
 			@request =
 				resources: @resources = "mock-resources"
-				project_id: @project_id = "project-id-123"
-				user_id: @user_id = "1234"
+				project_id: @project_id
+				user_id: @user_id
 			@output_files = ["foo", "bar"]
 			@Settings.compileDir = "compiles"
 			@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
@@ -95,11 +103,12 @@ describe "CompileManager", ->
 			@request =
 				resources: @resources = "mock-resources"
 				rootResourcePath: @rootResourcePath = "main.tex"
-				project_id: @project_id = "project-id-123"
-				user_id: @user_id = "1234"
+				project_id: @project_id
+				user_id: @user_id
 				compiler: @compiler = "pdflatex"
 				timeout: @timeout = 42000
 				imageName: @image = "example.com/image"
+				flags: @flags = ["-file-line-error"]
 			@env = {}
 			@Settings.compileDir = "compiles"
 			@compileDir = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}"
@@ -127,6 +136,7 @@ describe "CompileManager", ->
 					compiler: @compiler
 					timeout: @timeout
 					image: @image
+					flags: @flags
 					environment: @env
 				})
 				.should.equal true
@@ -165,6 +175,7 @@ describe "CompileManager", ->
 					compiler: @compiler
 					timeout: @timeout
 					image: @image
+					flags: @flags
 					environment: {'CHKTEX_OPTIONS': '-nall -e9 -e10 -w15 -w16', 'CHKTEX_EXIT_ON_ERROR':1, 'CHKTEX_ULIMIT_OPTIONS': '-t 5 -v 64000'}
 				})
 				.should.equal true
@@ -183,6 +194,7 @@ describe "CompileManager", ->
 					compiler: @compiler
 					timeout: @timeout
 					image: @image
+					flags: @flags
 					environment: @env
 				})
 				.should.equal true
@@ -247,16 +259,23 @@ describe "CompileManager", ->
 	describe "syncFromCode", ->
 		beforeEach ->
 			@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
-			@child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n", "")
+			@stdout = "NODE\t#{@page}\t#{@h}\t#{@v}\t#{@width}\t#{@height}\n"
+			@CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout})
 			@CompileManager.syncFromCode @project_id, @user_id, @file_name, @line, @column, @callback

 		it "should execute the synctex binary", ->
-			bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
 			synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
 			file_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}"
-			@child_process.execFile
-				.calledWith(bin_path, ["code", synctex_path, file_path, @line, @column], timeout: 10000)
-				.should.equal true
+			@CommandRunner.run
+				.calledWith(
+					"#{@project_id}-#{@user_id}",
+					['/opt/synctex', 'code', synctex_path, file_path, @line, @column],
+					"#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
+					@Settings.clsi.docker.image,
+					60000,
+					{}
+				).should.equal true

 		it "should call the callback with the parsed output", ->
 			@callback
@@ -272,15 +291,21 @@ describe "CompileManager", ->
 	describe "syncFromPdf", ->
 		beforeEach ->
 			@fs.stat = sinon.stub().callsArgWith(1, null,{isFile: ()->true})
-			@child_process.execFile.callsArgWith(3, null, @stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n", "")
+			@stdout = "NODE\t#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/#{@file_name}\t#{@line}\t#{@column}\n"
+			@CommandRunner.run = sinon.stub().callsArgWith(6, null, {stdout:@stdout})
 			@CompileManager.syncFromPdf @project_id, @user_id, @page, @h, @v, @callback

 		it "should execute the synctex binary", ->
-			bin_path = Path.resolve(__dirname + "/../../../bin/synctex")
 			synctex_path = "#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}/output.pdf"
-			@child_process.execFile
-				.calledWith(bin_path, ["pdf", synctex_path, @page, @h, @v], timeout: 10000)
-				.should.equal true
+			@CommandRunner.run
+				.calledWith(
+					"#{@project_id}-#{@user_id}",
+					['/opt/synctex', "pdf", synctex_path, @page, @h, @v],
+					"#{@Settings.path.compilesDir}/#{@project_id}-#{@user_id}",
+					@Settings.clsi.docker.image,
+					60000,
+					{}).should.equal true

 		it "should call the callback with the parsed output", ->
 			@callback
@@ -297,8 +322,8 @@ describe "CompileManager", ->
 			@fs.readFile = sinon.stub().callsArgWith(2, null, @stdout = "Encoding: ascii\nWords in text: 2")
 			@callback = sinon.stub()

-			@project_id = "project-id-123"
-			@timeout = 10 * 1000
+			@project_id
+			@timeout = 60 * 1000
 			@file_name = "main.tex"
 			@Settings.path.compilesDir = "/local/compile/directory"
 			@image = "example.com/image"
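The rewritten sync tests imply that synctex now runs through CommandRunner.run instead of a local child_process.execFile, with a seven-argument signature. A sketch of the call shape these assertions pin down (the implementation shown is a stub, not the module):

```coffeescript
# Signature implied by the assertions: (name, command, directory, image, timeout, environment, callback)
CommandRunner =
	run: (project_id, command, directory, image, timeout, environment, callback = (error, output) ->) ->
		# A Docker-backed runner would start a container from `image`,
		# bind-mount `directory` at /compile, and execute `command` inside it.
		callback null, {stdout: ""}

CommandRunner.run "project-id-123-1234",
	["/opt/synctex", "code", "/compiles/dir/project-id-123-1234/output.pdf", "main.tex", 3, 5],
	"/compiles/dir/project-id-123-1234",
	"SOMEIMAGE", 60000, {}, (error, output) ->
		console.log output.stdout
```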
test/unit/coffee/DockerLockManagerTests.coffee (new file, 145 lines)
@@ -0,0 +1,145 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerLockManager'

describe "LockManager", ->
	beforeEach ->
		@LockManager = SandboxedModule.require modulePath, requires:
			"settings-sharelatex": @Settings =
				clsi: docker: {}
			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }

	describe "runWithLock", ->
		describe "with a single lock", ->
			beforeEach (done) ->
				@callback = sinon.stub()
				@LockManager.runWithLock "lock-one", (releaseLock) ->
					setTimeout () ->
						releaseLock(null, "hello", "world")
					, 100
				, (err, args...) =>
					@callback(err,args...)
					done()

			it "should call the callback", ->
				@callback.calledWith(null,"hello","world").should.equal true

		describe "with two locks", ->
			beforeEach (done) ->
				@callback1 = sinon.stub()
				@callback2 = sinon.stub()
				@LockManager.runWithLock "lock-one", (releaseLock) ->
					setTimeout () ->
						releaseLock(null, "hello", "world","one")
					, 100
				, (err, args...) =>
					@callback1(err,args...)
				@LockManager.runWithLock "lock-two", (releaseLock) ->
					setTimeout () ->
						releaseLock(null, "hello", "world","two")
					, 200
				, (err, args...) =>
					@callback2(err,args...)
					done()

			it "should call the first callback", ->
				@callback1.calledWith(null,"hello","world","one").should.equal true

			it "should call the second callback", ->
				@callback2.calledWith(null,"hello","world","two").should.equal true

		describe "with lock contention", ->
			describe "where the first lock is released quickly", ->
				beforeEach (done) ->
					@LockManager.MAX_LOCK_WAIT_TIME = 1000
					@LockManager.LOCK_TEST_INTERVAL = 100
					@callback1 = sinon.stub()
					@callback2 = sinon.stub()
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","one")
						, 100
					, (err, args...) =>
						@callback1(err,args...)
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","two")
						, 200
					, (err, args...) =>
						@callback2(err,args...)
						done()

				it "should call the first callback", ->
					@callback1.calledWith(null,"hello","world","one").should.equal true

				it "should call the second callback", ->
					@callback2.calledWith(null,"hello","world","two").should.equal true

			describe "where the first lock is held longer than the waiting time", ->
				beforeEach (done) ->
					@LockManager.MAX_LOCK_HOLD_TIME = 10000
					@LockManager.MAX_LOCK_WAIT_TIME = 1000
					@LockManager.LOCK_TEST_INTERVAL = 100
					@callback1 = sinon.stub()
					@callback2 = sinon.stub()
					doneOne = doneTwo = false
					finish = (key) ->
						doneOne = true if key is 1
						doneTwo = true if key is 2
						done() if doneOne and doneTwo
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","one")
						, 1100
					, (err, args...) =>
						@callback1(err,args...)
						finish(1)
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","two")
						, 100
					, (err, args...) =>
						@callback2(err,args...)
						finish(2)

				it "should call the first callback", ->
					@callback1.calledWith(null,"hello","world","one").should.equal true

				it "should call the second callback with an error", ->
					error = sinon.match.instanceOf Error
					@callback2.calledWith(error).should.equal true

			describe "where the first lock is held longer than the max holding time", ->
				beforeEach (done) ->
					@LockManager.MAX_LOCK_HOLD_TIME = 1000
					@LockManager.MAX_LOCK_WAIT_TIME = 2000
					@LockManager.LOCK_TEST_INTERVAL = 100
					@callback1 = sinon.stub()
					@callback2 = sinon.stub()
					doneOne = doneTwo = false
					finish = (key) ->
						doneOne = true if key is 1
						doneTwo = true if key is 2
						done() if doneOne and doneTwo
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","one")
						, 1500
					, (err, args...) =>
						@callback1(err,args...)
						finish(1)
					@LockManager.runWithLock "lock", (releaseLock) ->
						setTimeout () ->
							releaseLock(null, "hello", "world","two")
						, 100
					, (err, args...) =>
						@callback2(err,args...)
						finish(2)

				it "should call the first callback", ->
					@callback1.calledWith(null,"hello","world","one").should.equal true

				it "should call the second callback", ->
					@callback2.calledWith(null,"hello","world","two").should.equal true
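These tests pin down the lock semantics: holders of the same key run strictly one after another, a waiter gives up with an Error after MAX_LOCK_WAIT_TIME, and MAX_LOCK_HOLD_TIME bounds how long a holder can block others. A usage sketch (doSomeWork is hypothetical):

```coffeescript
LockManager = require "../../../app/coffee/DockerLockManager"  # path as in the tests

LockManager.runWithLock "project-id-123", (releaseLock) ->
	doSomeWork (error, result) ->   # hypothetical async job
		releaseLock(error, result)   # frees the key for the next waiter
, (error, result) ->
	console.error "lock wait timed out" if error?
```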
test/unit/coffee/DockerRunnerTests.coffee (new file, 509 lines)
@@ -0,0 +1,509 @@
SandboxedModule = require('sandboxed-module')
sinon = require('sinon')
require('chai').should()
expect = require('chai').expect
require "coffee-script"
modulePath = require('path').join __dirname, '../../../app/coffee/DockerRunner'
Path = require "path"

describe "DockerRunner", ->
	beforeEach ->
		@container = container = {}
		@DockerRunner = SandboxedModule.require modulePath, requires:
			"settings-sharelatex": @Settings =
				clsi: docker: {}
				path: {}
			"logger-sharelatex": @logger = {
				log: sinon.stub(),
				error: sinon.stub(),
				info: sinon.stub(),
				warn: sinon.stub()
			}
			"dockerode": class Docker
				getContainer: sinon.stub().returns(container)
				createContainer: sinon.stub().yields(null, container)
				listContainers: sinon.stub()
			"fs": @fs = { stat: sinon.stub().yields(null,{isDirectory:()->true}) }
			"./Metrics":
				Timer: class Timer
					done: () ->
			"./LockManager":
				runWithLock: (key, runner, callback) -> runner(callback)
		@Docker = Docker
		@getContainer = Docker::getContainer
		@createContainer = Docker::createContainer
		@listContainers = Docker::listContainers

		@directory = "/local/compile/directory"
		@mainFile = "main-file.tex"
		@compiler = "pdflatex"
		@image = "example.com/sharelatex/image:2016.2"
		@env = {}
		@callback = sinon.stub()
		@project_id = "project-id-123"
		@volumes =
			"/local/compile/directory": "/compile"
		@Settings.clsi.docker.image = @defaultImage = "default-image"
		@Settings.clsi.docker.env = PATH: "mock-path"

	describe "run", ->
		beforeEach (done)->
			@DockerRunner._getContainerOptions = sinon.stub().returns(@options = {mockoptions: "foo"})
			@DockerRunner._fingerprintContainer = sinon.stub().returns(@fingerprint = "fingerprint")

			@name = "project-#{@project_id}-#{@fingerprint}"

			@command = ["mock", "command", "--outdir=$COMPILE_DIR"]
			@command_with_dir = ["mock", "command", "--outdir=/compile"]
			@timeout = 42000
			done()

		describe "successfully", ->
			beforeEach (done)->
				@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
				@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, (err, output)=>
					@callback(err, output)
					done()

			it "should generate the options for the container", ->
				@DockerRunner._getContainerOptions
					.calledWith(@command_with_dir, @image, @volumes, @timeout)
					.should.equal true

			it "should generate the fingerprint from the returned options", ->
				@DockerRunner._fingerprintContainer
					.calledWith(@options)
					.should.equal true

			it "should do the run", ->
				@DockerRunner._runAndWaitForContainer
					.calledWith(@options, @volumes, @timeout)
					.should.equal true

			it "should call the callback", ->
				@callback.calledWith(null, @output).should.equal true

		describe 'when path.sandboxedCompilesHostDir is set', ->

			beforeEach ->
				@Settings.path.sandboxedCompilesHostDir = '/some/host/dir/compiles'
				@directory = '/var/lib/sharelatex/data/compiles/xyz'
				@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
				@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

			it 'should re-write the bind directory', ->
				volumes = @DockerRunner._runAndWaitForContainer.lastCall.args[1]
				expect(volumes).to.deep.equal {
					'/some/host/dir/compiles/xyz': '/compile'
				}

			it "should call the callback", ->
				@callback.calledWith(null, @output).should.equal true

		describe "when the run throws an error", ->
			beforeEach ->
				firstTime = true
				@output = "mock-output"
				@DockerRunner._runAndWaitForContainer = (options, volumes, timeout, callback = (error, output)->) =>
					if firstTime
						firstTime = false
						callback new Error("HTTP code is 500 which indicates error: server error")
					else
						callback(null, @output)
				sinon.spy @DockerRunner, "_runAndWaitForContainer"
				@DockerRunner.destroyContainer = sinon.stub().callsArg(3)
				@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

			it "should do the run twice", ->
				@DockerRunner._runAndWaitForContainer
					.calledTwice.should.equal true

			it "should destroy the container in between", ->
				@DockerRunner.destroyContainer
					.calledWith(@name, null)
					.should.equal true

			it "should call the callback", ->
				@callback.calledWith(null, @output).should.equal true

		describe "with no image", ->
			beforeEach ->
				@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
				@DockerRunner.run @project_id, @command, @directory, null, @timeout, @env, @callback

			it "should use the default image", ->
				@DockerRunner._getContainerOptions
					.calledWith(@command_with_dir, @defaultImage, @volumes, @timeout)
					.should.equal true

		describe "with image override", ->
			beforeEach ->
				@Settings.texliveImageNameOveride = "overrideimage.com/something"
				@DockerRunner._runAndWaitForContainer = sinon.stub().callsArgWith(3, null, @output = "mock-output")
				@DockerRunner.run @project_id, @command, @directory, @image, @timeout, @env, @callback

			it "should use the override and keep the tag", ->
				image = @DockerRunner._getContainerOptions.args[0][1]
				image.should.equal "overrideimage.com/something/image:2016.2"

	describe "_runAndWaitForContainer", ->
		beforeEach ->
			@options = {mockoptions: "foo", name: @name = "mock-name"}
			@DockerRunner.startContainer = (options, volumes, attachStreamHandler, callback) =>
				attachStreamHandler(null, @output = "mock-output")
				callback(null, @containerId = "container-id")
			sinon.spy @DockerRunner, "startContainer"
			@DockerRunner.waitForContainer = sinon.stub().callsArgWith(2, null, @exitCode = 42)
			@DockerRunner._runAndWaitForContainer @options, @volumes, @timeout, @callback

		it "should create/start the container", ->
			@DockerRunner.startContainer
				.calledWith(@options, @volumes)
				.should.equal true

		it "should wait for the container to finish", ->
			@DockerRunner.waitForContainer
				.calledWith(@name, @timeout)
				.should.equal true

		it "should call the callback with the output", ->
			@callback.calledWith(null, @output).should.equal true

	describe "startContainer", ->
		beforeEach ->
			@attachStreamHandler = sinon.stub()
			@attachStreamHandler.cock = true
			@options = {mockoptions: "foo", name: "mock-name"}
			@container.inspect = sinon.stub().callsArgWith(0)
			@DockerRunner.attachToContainer = (containerId, attachStreamHandler, cb)=>
				attachStreamHandler()
				cb()
			sinon.spy @DockerRunner, "attachToContainer"

		describe "when the container exists", ->
			beforeEach ->
				@container.inspect = sinon.stub().callsArgWith(0)
				@container.start = sinon.stub().yields()

				@DockerRunner.startContainer @options, @volumes, @callback, ->

			it "should start the container with the given name", ->
				@getContainer
					.calledWith(@options.name)
					.should.equal true
				@container.start
					.called
					.should.equal true

			it "should not try to create the container", ->
				@createContainer.called.should.equal false

			it "should attach to the container", ->
				@DockerRunner.attachToContainer.called.should.equal true

			it "should call the callback", ->
				@callback.called.should.equal true

			it "should attach before the container starts", ->
				sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)

		describe "when the container does not exist", ->
			beforeEach ()->
				exists = false
				@container.start = sinon.stub().yields()
				@container.inspect = sinon.stub().callsArgWith(0, {statusCode:404})
				@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

			it "should create the container", ->
				@createContainer
					.calledWith(@options)
					.should.equal true

			it "should call the callback and stream handler", ->
				@attachStreamHandler.called.should.equal true
				@callback.called.should.equal true

			it "should attach to the container", ->
				@DockerRunner.attachToContainer.called.should.equal true

			it "should attach before the container starts", ->
				sinon.assert.callOrder(@DockerRunner.attachToContainer, @container.start)

		describe "when the container is already running", ->
			beforeEach ->
				error = new Error("HTTP code is 304 which indicates error: server error - start: Cannot start container #{@name}: The container MOCKID is already running.")
				error.statusCode = 304
				@container.start = sinon.stub().yields(error)
				@container.inspect = sinon.stub().callsArgWith(0)
				@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

			it "should not try to create the container", ->
				@createContainer.called.should.equal false

			it "should call the callback and stream handler without an error", ->
				@attachStreamHandler.called.should.equal true
				@callback.called.should.equal true

		describe "when a volume does not exist", ->
			beforeEach ()->
				@fs.stat = sinon.stub().yields(new Error("no such path"))
				@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

			it "should not try to create the container", ->
				@createContainer.called.should.equal false

			it "should call the callback with an error", ->
				@callback.calledWith(new Error()).should.equal true

		describe "when a volume exists but is not a directory", ->
			beforeEach ->
				@fs.stat = sinon.stub().yields(null, {isDirectory: () -> return false})
				@DockerRunner.startContainer @options, @volumes, @attachStreamHandler, @callback

			it "should not try to create the container", ->
				@createContainer.called.should.equal false

			it "should call the callback with an error", ->
				@callback.calledWith(new Error()).should.equal true

		describe "when a volume does not exist, but sibling-containers are used", ->
			beforeEach ->
				@fs.stat = sinon.stub().yields(new Error("no such path"))
				@Settings.path.sandboxedCompilesHostDir = '/some/path'
				@container.start = sinon.stub().yields()
				@DockerRunner.startContainer @options, @volumes, @callback

			afterEach ->
				delete @Settings.path.sandboxedCompilesHostDir

			it "should start the container with the given name", ->
				@getContainer
					.calledWith(@options.name)
					.should.equal true
				@container.start
					.called
					.should.equal true

			it "should not try to create the container", ->
				@createContainer.called.should.equal false

			it "should call the callback", ->
				@callback.called.should.equal true
				@callback.calledWith(new Error()).should.equal false

		describe "when the container tries to be created, but already has been (race condition)", ->

	describe "waitForContainer", ->
		beforeEach ->
			@containerId = "container-id"
			@timeout = 5000
			@container.wait = sinon.stub().yields(null, StatusCode: @statusCode = 42)
			@container.kill = sinon.stub().yields()

		describe "when the container returns in time", ->
			beforeEach ->
				@DockerRunner.waitForContainer @containerId, @timeout, @callback

			it "should wait for the container", ->
				@getContainer
					.calledWith(@containerId)
					.should.equal true
				@container.wait
					.called
					.should.equal true

			it "should call the callback with the exit", ->
				@callback
					.calledWith(null, @statusCode)
					.should.equal true

		describe "when the container does not return before the timeout", ->
			beforeEach (done) ->
				@container.wait = (callback = (error, exitCode) ->) ->
					setTimeout () ->
						callback(null, StatusCode: 42)
					, 100
				@timeout = 5
				@DockerRunner.waitForContainer @containerId, @timeout, (args...) =>
					@callback(args...)
					done()

			it "should call kill on the container", ->
				@getContainer
					.calledWith(@containerId)
					.should.equal true
				@container.kill
					.called
					.should.equal true

			it "should call the callback with an error", ->
				error = new Error("container timed out")
				error.timedout = true
				@callback
					.calledWith(error)
					.should.equal true

	describe "destroyOldContainers", ->
		beforeEach (done) ->
			oneHourInSeconds = 60 * 60
			oneHourInMilliseconds = oneHourInSeconds * 1000
			nowInSeconds = Date.now()/1000
			@containers = [{
				Name: "/project-old-container-name"
				Id: "old-container-id"
				Created: nowInSeconds - oneHourInSeconds - 100
			}, {
				Name: "/project-new-container-name"
				Id: "new-container-id"
				Created: nowInSeconds - oneHourInSeconds + 100
			}, {
				Name: "/totally-not-a-project-container"
				Id: "some-random-id"
				Created: nowInSeconds - (2 * oneHourInSeconds )
			}]
			@DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
			@listContainers.callsArgWith(1, null, @containers)
			@DockerRunner.destroyContainer = sinon.stub().callsArg(3)
			@DockerRunner.destroyOldContainers (error) =>
				@callback(error)
				done()

		it "should list all containers", ->
			@listContainers
				.calledWith(all: true)
				.should.equal true

		it "should destroy old containers", ->
			@DockerRunner.destroyContainer
				.callCount
				.should.equal 1
			@DockerRunner.destroyContainer
				.calledWith("/project-old-container-name", "old-container-id")
				.should.equal true

		it "should not destroy new containers", ->
			@DockerRunner.destroyContainer
				.calledWith("/project-new-container-name", "new-container-id")
				.should.equal false

		it "should not destroy non-project containers", ->
			@DockerRunner.destroyContainer
				.calledWith("/totally-not-a-project-container", "some-random-id")
				.should.equal false

		it "should callback the callback", ->
			@callback.called.should.equal true

	describe '_destroyContainer', ->
		beforeEach ->
			@containerId = 'some_id'
			@fakeContainer =
				remove: sinon.stub().callsArgWith(1, null)
			@Docker::getContainer = sinon.stub().returns(@fakeContainer)

		it 'should get the container', (done) ->
			@DockerRunner._destroyContainer @containerId, false, (err) =>
				@Docker::getContainer.callCount.should.equal 1
				@Docker::getContainer.calledWith(@containerId).should.equal true
				done()

		it 'should try to force-destroy the container when shouldForce=true', (done) ->
			@DockerRunner._destroyContainer @containerId, true, (err) =>
				@fakeContainer.remove.callCount.should.equal 1
				@fakeContainer.remove.calledWith({force: true}).should.equal true
				done()

		it 'should not try to force-destroy the container when shouldForce=false', (done) ->
			@DockerRunner._destroyContainer @containerId, false, (err) =>
				@fakeContainer.remove.callCount.should.equal 1
				@fakeContainer.remove.calledWith({force: false}).should.equal true
				done()

		it 'should not produce an error', (done) ->
			@DockerRunner._destroyContainer @containerId, false, (err) =>
				expect(err).to.equal null
				done()

		describe 'when the container is already gone', ->
			beforeEach ->
				@fakeError = new Error('woops')
				@fakeError.statusCode = 404
				@fakeContainer =
					remove: sinon.stub().callsArgWith(1, @fakeError)
				@Docker::getContainer = sinon.stub().returns(@fakeContainer)

			it 'should not produce an error', (done) ->
				@DockerRunner._destroyContainer @containerId, false, (err) =>
					expect(err).to.equal null
					done()

		describe 'when container.destroy produces an error', (done) ->
			beforeEach ->
				@fakeError = new Error('woops')
				@fakeError.statusCode = 500
				@fakeContainer =
					remove: sinon.stub().callsArgWith(1, @fakeError)
				@Docker::getContainer = sinon.stub().returns(@fakeContainer)

			it 'should produce an error', (done) ->
				@DockerRunner._destroyContainer @containerId, false, (err) =>
					expect(err).to.not.equal null
					expect(err).to.equal @fakeError
					done()

	describe 'kill', ->
		beforeEach ->
			@containerId = 'some_id'
			@fakeContainer =
				kill: sinon.stub().callsArgWith(0, null)
			@Docker::getContainer = sinon.stub().returns(@fakeContainer)

		it 'should get the container', (done) ->
			@DockerRunner.kill @containerId, (err) =>
				@Docker::getContainer.callCount.should.equal 1
				@Docker::getContainer.calledWith(@containerId).should.equal true
				done()

		it 'should try to force-destroy the container', (done) ->
			@DockerRunner.kill @containerId, (err) =>
				@fakeContainer.kill.callCount.should.equal 1
				done()

		it 'should not produce an error', (done) ->
			@DockerRunner.kill @containerId, (err) =>
				expect(err).to.equal undefined
				done()

		describe 'when the container is not actually running', ->
			beforeEach ->
				@fakeError = new Error('woops')
				@fakeError.statusCode = 500
				@fakeError.message = 'Cannot kill container <whatever> is not running'
				@fakeContainer =
					kill: sinon.stub().callsArgWith(0, @fakeError)
				@Docker::getContainer = sinon.stub().returns(@fakeContainer)

			it 'should not produce an error', (done) ->
				@DockerRunner.kill @containerId, (err) =>
					expect(err).to.equal undefined
					done()

		describe 'when container.kill produces a legitimate error', (done) ->
			beforeEach ->
				@fakeError = new Error('woops')
				@fakeError.statusCode = 500
				@fakeError.message = 'Totally legitimate reason to throw an error'
				@fakeContainer =
					kill: sinon.stub().callsArgWith(0, @fakeError)
				@Docker::getContainer = sinon.stub().returns(@fakeContainer)

			it 'should produce an error', (done) ->
				@DockerRunner.kill @containerId, (err) =>
					expect(err).to.not.equal undefined
					expect(err).to.equal @fakeError
					done()
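Taken together, these cases describe DockerRunner.run(project_id, command, directory, image, timeout, environment, callback): $COMPILE_DIR in the command is rewritten to the in-container mount point /compile, a missing image falls back to Settings.clsi.docker.image, texliveImageNameOveride swaps the registry while keeping the tag, and a failed run destroys the container and retries once. A call sketch (command and paths illustrative):

```coffeescript
DockerRunner = require "../../../app/coffee/DockerRunner"  # path as in the tests

command = ["latexmk", "-pdf", "--outdir=$COMPILE_DIR", "$COMPILE_DIR/main.tex"]  # illustrative
DockerRunner.run "project-id-123", command,
	"/local/compile/directory",              # bind-mounted at /compile in the container
	"example.com/sharelatex/image:2016.2",   # null would fall back to the default image
	42000, {}, (error, output) ->
		throw error if error?
		console.log output
```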
@@ -59,3 +59,21 @@ describe "LatexRunner", ->
 			mainFile = command.slice(-1)[0]
 			mainFile.should.equal "$COMPILE_DIR/main-file.tex"

+	describe "with a flags option", ->
+		beforeEach ->
+			@LatexRunner.runLatex @project_id,
+				directory: @directory
+				mainFile: @mainFile
+				compiler: @compiler
+				image: @image
+				timeout: @timeout = 42000
+				flags: ["-file-line-error", "-halt-on-error"]
+				@callback
+
+		it "should include the flags in the command", ->
+			command = @CommandRunner.run.args[0][1]
+			flags = command.filter (arg) ->
+				(arg == "-file-line-error") || (arg == "-halt-on-error")
+			flags.length.should.equal 2
+			flags[0].should.equal "-file-line-error"
+			flags[1].should.equal "-halt-on-error"
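Flags arrive in the compile request and end up verbatim on the LaTeX command line. What a request carrying them might look like (field names as exercised by the RequestParser tests further down; values illustrative):

```coffeescript
request =
	compile:
		options:
			compiler: "pdflatex"
			timeout: 42
			flags: ["-file-line-error", "-halt-on-error"]
		resources: [
			path: "main.tex"
			content: "\\documentclass{article}\n\\begin{document}Hello\\end{document}"
		]
```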
@@ -5,11 +5,14 @@ modulePath = require('path').join __dirname, '../../../app/js/LockManager'
 Path = require "path"
 Errors = require "../../../app/js/Errors"

-describe "LockManager", ->
+describe "DockerLockManager", ->
 	beforeEach ->
 		@LockManager = SandboxedModule.require modulePath, requires:
 			"settings-sharelatex": {}
-			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub(), err:-> }
+			"fs":
+				lstat:sinon.stub().callsArgWith(1)
+				readdir: sinon.stub().callsArgWith(1)
+			"lockfile": @Lockfile = {}
 		@lockFile = "/local/compile/directory/.project-lock"

@@ -1,6 +1,7 @@
 SandboxedModule = require('sandboxed-module')
 sinon = require('sinon')
 require('chai').should()
+expect = require('chai').expect
 modulePath = require('path').join __dirname, '../../../app/js/RequestParser'
 tk = require("timekeeper")

@@ -16,10 +17,12 @@ describe "RequestParser", ->
 			compile:
 				token: "token-123"
 				options:
+					imageName: "basicImageName/here:2017-1"
 					compiler: "pdflatex"
 					timeout: 42
 				resources: []

-		@RequestParser = SandboxedModule.require modulePath
+		@RequestParser = SandboxedModule.require modulePath, requires:
+			"settings-sharelatex": @settings = {}

 	afterEach ->
 		tk.reset()
@@ -57,6 +60,28 @@ describe "RequestParser", ->
 		it "should set the compiler to pdflatex by default", ->
 			@data.compiler.should.equal "pdflatex"

+	describe "with imageName set", ->
+		beforeEach ->
+			@RequestParser.parse @validRequest, (error, @data) =>
+
+		it "should set the imageName", ->
+			@data.imageName.should.equal "basicImageName/here:2017-1"
+
+	describe "with flags set", ->
+		beforeEach ->
+			@validRequest.compile.options.flags = ["-file-line-error"]
+			@RequestParser.parse @validRequest, (error, @data) =>
+
+		it "should set the flags attribute", ->
+			expect(@data.flags).to.deep.equal ["-file-line-error"]
+
+	describe "with flags not specified", ->
+		beforeEach ->
+			@RequestParser.parse @validRequest, (error, @data) =>
+
+		it "it should have an empty flags list", ->
+			expect(@data.flags).to.deep.equal []
+
 	describe "without a timeout specified", ->
 		beforeEach ->
 			delete @validRequest.compile.options.timeout
@@ -134,6 +134,30 @@ describe "ResourceWriter", ->
 				type: "aux"
 			}, {
 				path: "cache/_chunk1"
+			},{
+				path: "figures/image-eps-converted-to.pdf"
+				type: "pdf"
+			},{
+				path: "foo/main-figure0.md5"
+				type: "md5"
+			}, {
+				path: "foo/main-figure0.dpth"
+				type: "dpth"
+			}, {
+				path: "foo/main-figure0.pdf"
+				type: "pdf"
+			}, {
+				path: "_minted-main/default-pyg-prefix.pygstyle"
+				type: "pygstyle"
+			}, {
+				path: "_minted-main/default.pygstyle"
+				type: "pygstyle"
+			}, {
+				path: "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex"
+				type: "pygtex"
+			}, {
+				path: "_markdown_main/30893013dec5d869a415610079774c2f.md.tex"
+				type: "tex"
+			}]
 			@resources = "mock-resources"
 			@OutputFileFinder.findOutputFiles = sinon.stub().callsArgWith(2, null, @output_files)
@@ -165,6 +189,46 @@ describe "ResourceWriter", ->
 				.calledWith(path.join(@basePath, "cache/_chunk1"))
 				.should.equal false

+		it "should not delete the epstopdf converted files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "figures/image-eps-converted-to.pdf"))
+				.should.equal false
+
+		it "should not delete the tikz md5 files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "foo/main-figure0.md5"))
+				.should.equal false
+
+		it "should not delete the tikz dpth files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "foo/main-figure0.dpth"))
+				.should.equal false
+
+		it "should not delete the tikz pdf files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "foo/main-figure0.pdf"))
+				.should.equal false
+
+		it "should not delete the minted pygstyle files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "_minted-main/default-pyg-prefix.pygstyle"))
+				.should.equal false
+
+		it "should not delete the minted default pygstyle files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "_minted-main/default.pygstyle"))
+				.should.equal false
+
+		it "should not delete the minted default pygtex files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex"))
+				.should.equal false
+
+		it "should not delete the markdown md.tex files", ->
+			@ResourceWriter._deleteFileIfNotDirectory
+				.calledWith(path.join(@basePath, "_markdown_main/30893013dec5d869a415610079774c2f.md.tex"))
+				.should.equal false
+
 		it "should call the callback", ->
 			@callback.called.should.equal true
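The new fixtures protect incremental-compile artefacts (epstopdf conversions, tikz externalization files, minted and markdown caches) from the post-compile cleanup. One way to express such a keep-list; this is illustrative, and the module's actual rules may differ:

```coffeescript
# Illustrative keep-list matching the fixtures above.
KEEP_PATTERNS = [
	/-converted-to\.pdf$/   # epstopdf output
	/\.(md5|dpth)$/         # tikz externalization metadata
	/-figure\d+\.pdf$/      # tikz externalized figures
	/\.(pygtex|pygstyle)$/  # minted cache
	/\.md\.tex$/            # markdown package output
	/^cache\//
]

shouldKeep = (path) -> KEEP_PATTERNS.some (re) -> re.test(path)

console.log shouldKeep("foo/main-figure0.md5")  # => true
```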
@@ -65,6 +65,22 @@ describe 'TikzManager', ->
 				@callback.calledWithExactly(null, false)
 					.should.equal true

+		describe "and the main file contains \\usepackage{pstool}", ->
+			beforeEach ->
+				@SafeReader.readFile = sinon.stub()
+					.withArgs("#{@compileDir}/#{@mainFile}")
+					.callsArgWith(3, null, "hello \\usepackage[random-options]{pstool}")
+				@TikzManager.checkMainFile @compileDir, @mainFile, @resources, @callback
+
+			it "should look at the file on disk", ->
+				@SafeReader.readFile
+					.calledWith("#{@compileDir}/#{@mainFile}")
+					.should.equal true
+
+			it "should call the callback with true ", ->
+				@callback.calledWithExactly(null, true)
+					.should.equal true
+
 	describe "injectOutputFile", ->
 		beforeEach ->
 			@rootDir = "/mock"
@@ -7,17 +7,18 @@ EventEmitter = require("events").EventEmitter
 describe "UrlFetcher", ->
 	beforeEach ->
 		@callback = sinon.stub()
-		@url = "www.example.com/file"
+		@url = "https://www.example.com/file/here?query=string"
 		@UrlFetcher = SandboxedModule.require modulePath, requires:
 			request: defaults: @defaults = sinon.stub().returns(@request = {})
 			fs: @fs = {}
 			"logger-sharelatex": @logger = { log: sinon.stub(), error: sinon.stub() }
+			"settings-sharelatex": @settings = {}

 	it "should turn off the cookie jar in request", ->
 		@defaults.calledWith(jar: false)
 			.should.equal true

-	describe "_pipeUrlToFile", ->
+	describe "rewrite url domain if filestoreDomainOveride is set", ->
 		beforeEach ->
 			@path = "/path/to/file/on/disk"
 			@request.get = sinon.stub().returns(@urlStream = new EventEmitter)
@@ -26,21 +27,54 @@ describe "UrlFetcher", ->
 			@urlStream.resume = sinon.stub()
 			@fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter)
 			@fs.unlink = (file, callback) -> callback()
-			@UrlFetcher.pipeUrlToFile(@url, @path, @callback)

+		it "should use the normal domain when override not set", (done)->
+			@UrlFetcher.pipeUrlToFile @url, @path, =>
+				@request.get.args[0][0].url.should.equal @url
+				done()
+			@res = statusCode: 200
+			@urlStream.emit "response", @res
+			@urlStream.emit "end"
+			@fileStream.emit "finish"
+
+		it "should use override domain when filestoreDomainOveride is set", (done)->
+			@settings.filestoreDomainOveride = "192.11.11.11"
+			@UrlFetcher.pipeUrlToFile @url, @path, =>
+				@request.get.args[0][0].url.should.equal "192.11.11.11/file/here?query=string"
+				done()
+			@res = statusCode: 200
+			@urlStream.emit "response", @res
+			@urlStream.emit "end"
+			@fileStream.emit "finish"
+
+	describe "pipeUrlToFile", ->
+		beforeEach (done)->
+			@path = "/path/to/file/on/disk"
+			@request.get = sinon.stub().returns(@urlStream = new EventEmitter)
+			@urlStream.pipe = sinon.stub()
+			@urlStream.pause = sinon.stub()
+			@urlStream.resume = sinon.stub()
+			@fs.createWriteStream = sinon.stub().returns(@fileStream = new EventEmitter)
+			@fs.unlink = (file, callback) -> callback()
+			done()

 		describe "successfully", ->
+			beforeEach (done)->
+				@UrlFetcher.pipeUrlToFile @url, @path, =>
+					@callback()
+					done()
+				@res = statusCode: 200
+				@urlStream.emit "response", @res
+				@urlStream.emit "end"
+				@fileStream.emit "finish"
+
 			it "should request the URL", ->
 				@request.get
 					.calledWith(sinon.match {"url": @url})
 					.should.equal true

-		describe "successfully", ->
-			beforeEach ->
-				@res = statusCode: 200
-				@urlStream.emit "response", @res
-				@urlStream.emit "end"
-				@fileStream.emit "finish"
-
 			it "should open the file for writing", ->
 				@fs.createWriteStream
 					.calledWith(@path)
@@ -55,7 +89,10 @@ describe "UrlFetcher", ->
 				@callback.called.should.equal true

 		describe "with non success status code", ->
-			beforeEach ->
+			beforeEach (done)->
+				@UrlFetcher.pipeUrlToFile @url, @path, (err)=>
+					@callback(err)
+					done()
 				@res = statusCode: 404
 				@urlStream.emit "response", @res
 				@urlStream.emit "end"
@@ -66,7 +103,10 @@ describe "UrlFetcher", ->
 					.should.equal true

 		describe "with error", ->
-			beforeEach ->
+			beforeEach (done)->
+				@UrlFetcher.pipeUrlToFile @url, @path, (err)=>
+					@callback(err)
+					done()
 				@urlStream.emit "error", @error = new Error("something went wrong")

 			it "should call the callback with the error", ->
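The override test above implies a URL rewrite along these lines; a sketch, and the module's real implementation may differ:

```coffeescript
URL = require "url"

rewriteForFilestore = (url, settings) ->
	return url unless settings.filestoreDomainOveride?
	parsed = URL.parse(url)                                # legacy API, but stable
	settings.filestoreDomainOveride + (parsed.path or "")  # `path` keeps the query string

console.log rewriteForFilestore("https://www.example.com/file/here?query=string",
	{filestoreDomainOveride: "192.11.11.11"})
# => "192.11.11.11/file/here?query=string"
```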