368 Commits

Author SHA1 Message Date
Christopher Hoskin
3ce68b44d2 Update logger to 2.1.0 2020-07-01 15:24:26 +01:00
Brian Gough
c30e6a9d4f Merge pull request #181 from overleaf/bg-fix-503-response
handle EPIPE errors in CompileController
2020-06-22 09:30:35 +01:00
Brian Gough
b1ca08fd0c handle EPIPE errors in CompileController 2020-06-18 09:54:18 +01:00
Brian Gough
d98745431b Merge pull request #180 from overleaf/bg-add-compile-groups
add compile groups support
2020-06-18 08:52:45 +01:00
Brian Gough
6b69e26de3 Merge branch 'master' into bg-add-compile-groups 2020-06-17 11:58:26 +01:00
Brian Gough
a8286e7742 Merge pull request #179 from overleaf/bg-fix-synctex-error
fix synctex error
2020-06-16 08:57:54 +01:00
Brian Gough
58c6fe7c35 Merge pull request #178 from overleaf/bg-use-lodash
migrate from underscore to lodash
2020-06-16 08:57:14 +01:00
Brian Gough
74a11c7be3 fix format 2020-06-16 08:45:53 +01:00
Brian Gough
1f3217f598 Merge branch 'master' into bg-use-lodash 2020-06-16 08:35:17 +01:00
Brian Gough
52f4bfe9e2 Merge pull request #176 from overleaf/ta-epipe-retry-revert
Remove Retries in EPIPE Errors
2020-06-16 08:33:30 +01:00
Brian Gough
a88000281f add default settings to remove wordcount and synctex containers 2020-06-15 15:49:38 +01:00
Brian Gough
b33734bab6 add initial compileGroup support 2020-06-15 15:28:53 +01:00
Brian Gough
6c7019ccb7 downgrade NotFoundError log-level 2020-06-15 11:06:54 +01:00
Brian Gough
bad3850fcc add acceptance test for synctex when project/file does not exist 2020-06-15 10:55:01 +01:00
Brian Gough
9b92793b89 migrate from underscore to lodash 2020-06-15 09:52:21 +01:00
Brian Gough
6569da0242 use json parsing in request 2020-06-12 15:15:51 +01:00
Brian Gough
33d6462875 check output file exists before running synctex 2020-06-12 15:15:27 +01:00
Brian Gough
19690e7847 Merge pull request #175 from overleaf/bg-503-on-unavailable
send 503 unavailable response on EPIPE
2020-06-12 09:29:04 +01:00
Brian Gough
5aa90abc2d Merge pull request #177 from overleaf/bg-add-docker-setting
add missing setting for optimiseInDocker
2020-06-12 09:28:31 +01:00
Brian Gough
ba7de90a50 Merge pull request #174 from overleaf/bg-error-on-missing-profile
error on missing profile
2020-06-12 09:28:06 +01:00
Tim Alby
7ceadc8599 partially revert "[DockerRunner] fix metric incrementing and error logging"
This reverts commits:
- 2b2fcca39c
- 9e82ab0890
- e3da458b37
2020-06-11 12:51:26 +02:00
Brian Gough
f077c337ec send 503 unavailable response on EPIPE 2020-06-11 11:12:02 +01:00
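A minimal sketch of this behaviour, assuming an Express-style response object; the function name and messages are illustrative, not the actual CompileController code:

    // Hypothetical sketch: map EPIPE from the compile pipeline to a 503
    // so the client knows the service is temporarily unavailable and can retry.
    function handleCompileError(err, res) {
      if (err && err.code === 'EPIPE') {
        return res.status(503).send('compile service unavailable, please retry')
      }
      return res.status(500).send('compile failed')
    }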
Brian Gough
eb603f9f31 error on missing profile 2020-06-10 11:42:07 +01:00
Brian Gough
385cdd6f0c add missing setting for optimiseInDocker 2020-06-09 11:22:28 +01:00
Brian Gough
303fb03f1f Merge pull request #173 from overleaf/bg-openout-any
add setting TEXLIVE_OPENOUT_ANY
2020-06-08 09:03:05 +01:00
Brian Gough
3e3e4503eb add setting TEXLIVE_OPENOUT_ANY 2020-06-04 11:47:22 +01:00
Brian Gough
70363a9109 Merge pull request #172 from overleaf/update-node-10.21.0
Update node to 10.21.0
2020-06-03 14:40:21 +01:00
Brian Gough
59310cbb09 update buildscript.txt to node 10.21.0 2020-06-03 11:11:51 +01:00
Brian Gough
d88136c569 update to node 10.21.0 2020-06-03 10:22:31 +01:00
Brian Gough
0d44fb704b Merge pull request #171 from overleaf/bg-fix-format
fix format and lint checks
2020-06-02 11:48:11 +01:00
Brian Gough
bf2430f1fc fix broken unit test 2020-06-02 11:12:57 +01:00
Brian Gough
2211ebcefb fix eslint errors 2020-06-02 09:51:34 +01:00
Brian Gough
440ec5553e fix unreachable code lint error 2020-06-02 09:28:04 +01:00
Brian Gough
17c14b1192 fix formatting with make format_fix 2020-06-02 09:18:38 +01:00
Brian Gough
8c60406bb5 Merge pull request #170 from overleaf/jpa-import-141
[DockerRunner] destroyOldContainers: fix a race condition
2020-06-02 09:04:59 +01:00
Brian Gough
9db18c95a5 Merge pull request #169 from overleaf/bg-record-latexmk-output
record latexmk output
2020-06-02 09:03:43 +01:00
Jakob Ackermann
985bbf27c9 Merge pull request #141 from das7pad/hotfix-container-deletion-locking
[DockerRunner] destroyOldContainers: normalize the container name
2020-05-29 12:31:50 +02:00
Jakob Ackermann
f8cb5e36af [DockerRunner] destroyOldContainers: normalize the container name
The docker api returns each name with a `/` prefix.

In order to not interfere with pending compiles, the deletion process
 has to acquire an internal lock on the container. The LockManager uses
 the plain container name without the slash: `project-xxx`.

Signed-off-by: Jakob Ackermann <das7pad@outlook.com>
2020-05-29 11:28:26 +01:00
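A minimal sketch of the normalization described in that commit message (the function name is illustrative):

    // The docker API reports names as '/project-xxx'; the LockManager keys
    // locks by the plain name 'project-xxx', so strip the leading slash
    // before looking up the lock.
    function normalizeContainerName(name) {
      return name.replace(/^\//, '')
    }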
Brian Gough
1bcb370ca1 clean up log file deletion and add unit test 2020-05-20 14:12:08 +01:00
Brian Gough
e3c278e708 add unit tests 2020-05-20 11:52:53 +01:00
Brian Gough
54896fb157 clean up the stdout/stderr recording 2020-05-20 11:45:29 +01:00
Henry Oswald
fec359afac Merge pull request #162 from overleaf/ta-jpa-epipe-retry
[DockerRunner] retry container inspect on EPIPE
2020-05-19 11:15:25 +01:00
Henry Oswald
97f5691c87 Merge pull request #166 from overleaf/jpa-port-smoke-test-patch
[misc] simplify the smoke test and process shutdown
2020-05-19 10:31:32 +01:00
Jakob Ackermann
9807b51519 [misc] apply review feedback 2020-05-19 10:30:59 +01:00
Jakob Ackermann
b8125e396a [misc] simplify the smoke test and process shutdown 2020-05-19 10:30:59 +01:00
Henry Oswald
73afa1a8d7 Merge pull request #164 from overleaf/bg-fix-buffer-deprecations
fix deprecated usage of Buffer constructor
2020-05-19 10:26:56 +01:00
Henry Oswald
942678de38 Merge pull request #163 from overleaf/bg-use-encoding-on-process-output
set encoding when reading from streams
2020-05-19 10:26:26 +01:00
Henry Oswald
3834c37013 Merge pull request #165 from overleaf/ho-retry-url-downloads
add pipeUrlToFileWithRetry for file downloads
2020-05-19 10:25:19 +01:00
Henry Oswald
a425412bdd Merge pull request #168 from overleaf/ho-dynamic-disk-size-checker-2
add refreshExpiryTimeout function
2020-05-19 10:25:12 +01:00
Henry Oswald
c004d299c1 add refreshExpiryTimeout function
on the clsi, all data lives inside the / dir;
dynamically reduce the EXPIRY_TIMEOUT if the disk starts to get full
2020-05-18 15:17:19 +01:00
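A minimal sketch of the idea, with illustrative names and thresholds (the actual function body and cut-offs are not shown in this log):

    // Hypothetical sketch: shrink the expiry timeout as the disk fills,
    // so cached compile directories are evicted sooner under disk pressure.
    function refreshExpiryTimeout(baseTimeoutMs, diskUsageFraction) {
      if (diskUsageFraction > 0.9) return baseTimeoutMs / 10 // nearly full
      if (diskUsageFraction > 0.8) return baseTimeoutMs / 2
      return baseTimeoutMs
    }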
Brian Gough
5ab45c1031 record latexmk output 2020-05-15 16:08:10 +01:00
Henry Oswald
0bd99a3edc add pipeUrlToFileWithRetry function to retry file downloads 3 times 2020-05-14 13:24:58 +01:00
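A minimal sketch of such a retry wrapper, assuming a callback-style pipeUrlToFile(url, filePath, callback) exists; this is illustrative, not the actual implementation:

    function pipeUrlToFileWithRetry(url, filePath, callback) {
      let attemptsLeft = 3
      function attempt() {
        pipeUrlToFile(url, filePath, (err) => {
          attemptsLeft--
          if (err && attemptsLeft > 0) return attempt() // retry on failure
          callback(err)
        })
      }
      attempt()
    }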
Brian Gough
3592ffda52 fix deprecated usage of Buffer constructor 2020-05-07 10:42:05 +01:00
Brian Gough
5b5fd2f5df set encoding when reading from streams
using .toString() works most of the time but can lead to utf8 characters being
broken across chunk boundaries.

https://nodejs.org/api/stream.html#stream_readable_setencoding_encoding
2020-05-07 10:30:14 +01:00
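A minimal sketch of the difference, assuming `stream` is a Readable such as a child process's stdout:

    // Unsafe: a multi-byte UTF-8 character split across two chunks is
    // corrupted when each Buffer chunk is decoded independently:
    //   stream.on('data', (chunk) => { output += chunk.toString() })

    // Safe: setEncoding keeps a stateful decoder across chunk boundaries.
    let output = ''
    stream.setEncoding('utf8')
    stream.on('data', (chunk) => {
      output += chunk // chunks arrive as correctly decoded strings
    })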
Henry Oswald
7254a025ae Merge pull request #146 from overleaf/bg-support-other-runtimes
support other runtimes
2020-04-23 14:42:18 +01:00
Henry Oswald
4bd567b78d Merge branch 'master' into bg-support-other-runtimes
port coffee changes to js
2020-04-23 13:59:05 +01:00
Henry Oswald
b10f0ae9ad Merge pull request #158 from overleaf/jpa-entry-point-cleanup
[misc] entry point cleanup
2020-04-23 13:52:04 +01:00
Henry Oswald
a4c64d7151 Merge pull request #155 from overleaf/ho-limit-lifespan
limit clsi lifespan via health checks and PROCESS_LIFE_SPAN_LIMIT_MS
2020-04-23 13:51:09 +01:00
Henry Oswald
557dc47e30 cleanup the shutdown code a bit 2020-04-23 11:32:33 +01:00
Henry Oswald
3513748f73 add variance into shutdown time to avoid stampede 2020-04-14 13:36:11 +01:00
Jakob Ackermann
2b2fcca39c [DockerRunner] fix metric incrementing and error logging
- do not log on first EPIPE
- inc 'container-inspect-epipe-error' on permanent error only

Co-Authored-By: Tim Alby <timothee.alby@gmail.com>
2020-04-10 14:44:57 +02:00
Tim Alby
9e82ab0890 add metrics for EPIPE errors
Co-Authored-By: Jakob Ackermann <jakob.ackermann@overleaf.com>
2020-04-10 12:28:48 +02:00
Tim Alby
e3da458b37 retry once on EPIPE errors
Co-Authored-By: Jakob Ackermann <jakob.ackermann@overleaf.com>
2020-04-10 12:28:11 +02:00
Tim Alby
8fa4232148 fix arguments order
Co-Authored-By: Jakob Ackermann <jakob.ackermann@overleaf.com>
2020-04-10 12:27:15 +02:00
Jakob Ackermann
2bd4c1dbee Merge remote-tracking branch 'origin/master' into jpa-entry-point-cleanup 2020-04-03 14:05:36 +02:00
Jakob Ackermann
44f30d6c2e Merge pull request #159 from overleaf/jpa-fix-docker-modem-error-scheme
[misc] keep up with the error signature of dockerode/docker-modem
2020-04-03 14:04:06 +02:00
Jakob Ackermann
c56dcc4d15 Merge pull request #157 from overleaf/jpa-download-failed-metric
[misc] add a metric for failing downloads
2020-04-03 14:03:16 +02:00
Jakob Ackermann
2ff0564009 Merge pull request #156 from overleaf/jpa-logger-1-9-1
[misc] bump logger-sharelatex to 1.9.1
2020-04-03 14:03:04 +02:00
Jakob Ackermann
3db40804ba [misc] use a directory in /tmp for temporary data 2020-04-03 12:36:32 +02:00
Jakob Ackermann
4ee0dc2471 [misc] narrow down write access/ownership for the run-time user 2020-04-03 12:36:32 +02:00
Jakob Ackermann
c5d10d02fc [misc] move the sqlite database into a db/ directory 2020-04-03 12:36:31 +02:00
Jakob Ackermann
b69ea2f83f [misc] drop debug output and log docker version on stderr 2020-04-03 12:36:31 +02:00
Jakob Ackermann
638d2f30d3 [misc] add a metric for failing downloads 2020-04-03 12:35:40 +02:00
Jakob Ackermann
54ce1373af Merge pull request #160 from overleaf/jpa-build-scripts-2-1-0
[misc] bump the build-scripts to version 2.1.0
2020-04-03 12:33:06 +02:00
Jakob Ackermann
00c5ace592 [misc] bump the build-scripts to version 2.1.0
This will put acceptance and unit tests in their own namespaces so that they
 can run and be torn down individually.
2020-04-03 12:18:09 +02:00
Henry Oswald
b445f7fb78 Merge branch 'master' of github.com:overleaf/clsi into ho-limit-lifespan 2020-03-31 09:52:37 +01:00
Jakob Ackermann
4a26ec975f [misc] keep up with the error signature of dockerode/docker-modem
https://github.com/apocas/docker-modem/blob/v2.1.1/lib/modem.js#L296
2020-03-27 10:39:45 +01:00
Jakob Ackermann
96a41a5f17 [misc] bump logger-sharelatex to 1.9.1 2020-03-23 16:18:07 +01:00
Henry Oswald
2e5e040475 limit clsi lifespan via health checks and PROCESS_LIFE_SPAN_LIMIT_MS 2020-03-20 13:37:58 +00:00
Miguel Serrano
93c121f1dc Merge pull request #154 from overleaf/msm-dependency-update
Dependency update
2020-03-19 16:12:51 +01:00
mserranom
4a47f21edd updated sandboxed-module, chai and metrics-sharelatex 2020-03-16 17:14:04 +01:00
mserranom
6f837f1a74 updated sequelize 2020-03-16 16:31:02 +01:00
mserranom
3ff9c18dcb updated mocha and sinon, fixed tests 2020-03-12 10:35:11 +01:00
mserranom
ffb33ddb40 removed mkdirp dependency and replaced with fs.mkdir 2020-03-12 10:22:08 +01:00
mserranom
a850fec664 updated mkdirp 2020-03-11 19:39:08 +01:00
mserranom
c49381cfba updated dockerode, heapdump, lyns and fs-extra 2020-03-11 19:25:50 +01:00
Simon Detheridge
2e211fd218 Merge pull request #153 from overleaf/ho-remove-install-texlive-startup
copy synctex over to /app/bin/synctex-mount in entrypoint
2020-03-11 12:17:57 +00:00
mserranom
f3cf7ef9a0 updated minor/patch dependencies 2020-03-11 12:20:48 +01:00
Henry Oswald
26122bef3a copy synctex over to /app/bin/synctex-mount in entrypoint 2020-03-11 11:06:46 +00:00
mserranom
8673a99de3 npm audit fix 2020-03-11 11:58:42 +01:00
mserranom
7a0724d07b updated build-scripts 2020-03-11 11:55:44 +01:00
Simon Detheridge
98fedeba16 Merge pull request #152 from overleaf/ho-remove-install-texlive-startup
remove ./bin/install_texlive_gce.sh which shouldn't be needed
2020-03-11 10:25:21 +00:00
Henry Oswald
9fc13845a1 remove ./bin/install_texlive_gce.sh which shouldn't be needed
we shouldn't have needed this for a while; I think it is a cause of
startup delay. However, it should have stopped other missing texlive
images in the past, which is strange
2020-03-11 10:06:55 +00:00
Simon Detheridge
bd938047e3 Merge pull request #150 from overleaf/jpa-dependencies-cleanup
[misc] dependencies cleanup
2020-02-25 17:47:02 +00:00
Miguel Serrano
a93d25562e Merge pull request #151 from overleaf/msm-decaf-smoke-load
Decaffeinate smoke and load tests
2020-02-25 18:20:35 +01:00
mserranom
bf470cf5ae moved decaffeinated files to js folder 2020-02-25 17:15:31 +01:00
decaffeinate
957f80ada4 decaffeinate: Run post-processing cleanups on loadTest.coffee 2020-02-25 17:03:58 +01:00
decaffeinate
a327c217e7 decaffeinate: Convert loadTest.coffee to JS 2020-02-25 17:03:57 +01:00
decaffeinate
d180fcd84a decaffeinate: Rename loadTest.coffee from .coffee to .js 2020-02-25 17:03:57 +01:00
decaffeinate
699000111b decaffeinate: Run post-processing cleanups on SmokeTests.coffee 2020-02-25 17:03:39 +01:00
decaffeinate
4f01b7716e decaffeinate: Convert SmokeTests.coffee to JS 2020-02-25 17:03:37 +01:00
decaffeinate
d1e0b8548e decaffeinate: Rename SmokeTests.coffee from .coffee to .js 2020-02-25 17:03:36 +01:00
Jakob Ackermann
8772e1f7b1 [misc] cleanup unused dependency on mongo and redis 2020-02-25 14:45:09 +01:00
Jakob Ackermann
8bcbffccdc [misc] rename npm-shrinkwrap.json to package-lock.json 2020-02-25 14:45:09 +01:00
Miguel Serrano
eb2e84df9b Merge pull request #149 from overleaf/msm-service-decaf
clsi decaffeination
2020-02-24 14:48:29 +01:00
mserranom
ee12573b06 added container monitor cleanup to fix hanging tests 2020-02-20 17:24:28 +01:00
mserranom
7e737bba4f fixed test paths 2020-02-19 12:38:54 +01:00
mserranom
de36ab663c prettier: convert individual decaffeinated files to Prettier format 2020-02-19 12:16:18 +01:00
mserranom
62d20ee5f0 decaffeinate: convert individual files to js 2020-02-19 12:16:17 +01:00
mserranom
8694fce0c9 decaffeinate: rename individual coffee files to js files 2020-02-19 12:16:15 +01:00
mserranom
71a50dd11f prettier: convert test/acceptance decaffeinated files to Prettier format 2020-02-19 12:16:14 +01:00
mserranom
7996f44942 decaffeinate: rename test/acceptance/coffee to test/acceptance/js 2020-02-19 12:16:11 +01:00
decaffeinate
95854a3abb decaffeinate: Run post-processing cleanups on BrokenLatexFileTests.coffee and 9 other files 2020-02-19 12:16:07 +01:00
decaffeinate
955749a7c4 decaffeinate: Convert BrokenLatexFileTests.coffee and 9 other files to JS 2020-02-19 12:16:00 +01:00
decaffeinate
a2a3fddd54 decaffeinate: Rename BrokenLatexFileTests.coffee and 9 other files from .coffee to .js 2020-02-19 12:15:54 +01:00
mserranom
7e2542319f prettier: convert test/unit decaffeinated files to Prettier format 2020-02-19 12:15:37 +01:00
mserranom
b515397b5a decaffeinate: rename test/unit/coffee to test/unit/js 2020-02-19 12:15:30 +01:00
decaffeinate
0cb5426548 decaffeinate: Run post-processing cleanups on CompileControllerTests.coffee and 17 other files 2020-02-19 12:15:25 +01:00
decaffeinate
79a0891fee decaffeinate: Convert CompileControllerTests.coffee and 17 other files to JS 2020-02-19 12:15:08 +01:00
decaffeinate
18e6b4715d decaffeinate: Rename CompileControllerTests.coffee and 17 other files from .coffee to .js 2020-02-19 12:14:56 +01:00
mserranom
cffbd4e9ef prettier: convert app/js decaffeinated files to Prettier format 2020-02-19 12:14:37 +01:00
mserranom
4576ef54fb decaffeinate: rename app/coffee dir to app/js 2020-02-19 12:14:34 +01:00
decaffeinate
c056ca6968 decaffeinate: Run post-processing cleanups on CommandRunner.coffee and 25 other files 2020-02-19 12:14:28 +01:00
decaffeinate
4655768fd2 decaffeinate: Convert CommandRunner.coffee and 25 other files to JS 2020-02-19 12:14:14 +01:00
decaffeinate
37794788ce decaffeinate: Rename CommandRunner.coffee and 25 other files from .coffee to .js 2020-02-19 12:14:01 +01:00
mserranom
f8fff476dd decaffeinate: add eslint and prettier packages 2020-02-19 12:13:44 +01:00
mserranom
e14da0f9a6 decaffeinate: update .gitignore 2020-02-19 12:11:31 +01:00
mserranom
725074c09d decaffeinate: update build scripts to es 2020-02-19 12:10:00 +01:00
mserranom
be28b9f6f9 removed unneeded default function arg preventing from decaffeination 2020-02-19 12:06:09 +01:00
Jakob Ackermann
4611e245ed Merge pull request #148 from overleaf/bg-remove-travis-yml
Remove unused .travis.yml file
2020-02-14 10:55:29 +01:00
Brian Gough
82cc99c632 remove unused .travis.yml file 2020-02-12 12:37:00 +00:00
Jakob Ackermann
b15435d1a8 Merge pull request #147 from overleaf/jpa-build-scripts-1-3-5
[misc] update the build scripts to 1.3.5
2020-02-12 13:20:31 +01:00
Jakob Ackermann
1fc3292966 [misc] update the build scripts to 1.3.5 2020-02-11 12:27:59 +01:00
Brian Gough
ec628a4e59 support other runtimes 2020-02-10 16:10:18 +00:00
Jakob Ackermann
c2295f40f8 Merge pull request #145 from overleaf/jpa-change-base-image
[misc] use node:10.19.0 as base image
2020-02-07 15:52:02 +01:00
Jakob Ackermann
e897945a17 [misc] use node:10.19.0 as base image
Also adjust the node version in the other build-script files.
2020-02-07 14:49:42 +01:00
Brian Gough
812c4e661f Merge pull request #144 from overleaf/update-node-10
update to gcr.io/overleaf-ops/node:10.19.0
2020-02-06 03:46:15 +00:00
Brian Gough
186c8dcb2f update to gcr.io/overleaf-ops/node:10.19.0 2020-02-06 03:32:28 +00:00
Eric Mc Sween
9357d1e8fd Merge pull request #143 from overleaf/em-upgrade-node
Upgrade to local node:10.18.1 image
2020-02-04 11:58:32 -05:00
Eric Mc Sween
296f4cc2ff Upgrade to local node:10.18.1 image 2020-02-04 11:30:23 -05:00
Eric Mc Sween
f285e08ee0 Merge pull request #138 from overleaf/em-hidden-output-files
Show output files in subfolders
2019-11-07 09:01:08 -05:00
Eric Mc Sween
7894269b8d Show output files in subfolders
This fixes a tiny regexp bug that prevents output files in subfolders
from being shown in the "Other logs & files" panel.

We also downgrade the corresponding log because it's very noisy and does
not indicate a problem.
2019-11-07 08:27:24 -05:00
nate stemen
4648efe4f1 Merge pull request #136 from overleaf/ns-public-repo-comment
Update PR template to reflect current open source docs
2019-10-30 09:36:09 -04:00
Eric Mc Sween
faca2c6976 Merge pull request #137 from overleaf/em-stackdriver-logging
Upgrade logging and metrics modules
2019-10-28 09:49:56 -04:00
Eric Mc Sween
10e9be3cd2 Upgrade logging and metrics modules
The new versions add the ability to send logs directly to Stackdriver.
2019-10-28 09:31:57 -04:00
Nate Stemen
dc02e986bf bump build script to 1.1.24 2019-10-25 11:03:45 -04:00
Nate Stemen
c6c9bb9d3a add public link to contributing docs 2019-10-25 11:01:37 -04:00
Christopher Hoskin
5ad9f28116 Merge pull request #135 from overleaf/csh-passthrough-texlive
Pass through TEXLIVE_IMAGE
2019-10-24 17:20:51 +01:00
Christopher Hoskin
99b95df1ad Pass through TEXLIVE_IMAGE 2019-10-24 16:58:14 +01:00
Christopher Hoskin
c6af21ffd6 Bump build script to 1.1.23 2019-10-24 16:57:08 +01:00
Eric Mc Sween
2cd1f1b3d7 Merge pull request #133 from overleaf/em-send-output-files-on-timeout
Send output files on timeout
2019-10-23 08:21:24 -04:00
Eric Mc Sween
775306aa63 Send output files on timeout
The unconventional use of callbacks to return both an error and data
after compilation created a subtle bug where the output files were
dropped by the LockManager in case of an error such as a timeout.

This prevented the frontend from showing error logs when a timeout occurred,
creating confusion among users.

We now attach the output files to the error so that they reach the
controller and are sent back to the web service.
2019-10-22 16:31:37 -04:00
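A minimal sketch of the pattern described above (names are illustrative):

    // Attach the output files to the error so they survive the trip back
    // through the LockManager's error-first callbacks to the controller.
    function onCompileFinished(error, outputFiles, callback) {
      if (error) {
        error.outputFiles = outputFiles // carry logs alongside the error
        return callback(error)
      }
      callback(null, outputFiles)
    }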
Miguel Serrano
a62ff6e248 Merge pull request #131 from yuantailing/fix-compiler-manager
Fix synctex for LocalCommandRunner
2019-10-08 12:48:13 +02:00
Tailing Yuan
481a49a587 fix CompileManager and LocalCommandRunner 2019-10-04 23:02:03 +08:00
Shane Kilkelly
2675fa033e Merge pull request #128 from overleaf/sk-dep-upgrades-2
Update logger, metrics
2019-07-11 12:51:16 +01:00
Shane Kilkelly
dc6af8799f update logger and metrics 2019-06-18 16:29:20 +01:00
Shane Kilkelly
61bed0da2b Merge pull request #126 from overleaf/sk-increase-hard-timeout
Increase the hard-timeout to 10 minutes.
2019-06-10 09:44:48 +01:00
Shane Kilkelly
4f6ef61626 Increase the hard-timeout to 10 minutes.
In practice most projects will still be limited to five minutes,
but this allows us to bump up the limit for some projects,
especially legacy v1 projects that have been imported to v2
2019-06-06 16:39:16 +01:00
Brian Gough
ada07ad2c3 Merge pull request #120 from das7pad/hotfix/docker-group
[docker] add support for a different docker group id on the docker host
2019-05-16 14:04:27 +01:00
Brian Gough
bc530c70e2 Merge pull request #119 from overleaf/bg-increase-acceptance-test-timeout
increase timeout for long-running acceptance tests
2019-05-16 09:17:26 +01:00
Michael Mazour
db00288bb9 Merge pull request #125 from overleaf/mm-flags-in-request
Add flags option to request JSON
2019-05-15 14:06:47 +01:00
Michael Mazour
663ec88718 Add flags option to request JSON
Adds a `flags` parameter to the request JSON, appearing under the `compile.options` key (alongside such stalwarts as `compiler`, `timeout`, etc.).

This is primarily to support `-file-line-error` as an option, but could have other uses as well.

`flags` should be an array of strings, or absent. If supplied, the listed arguments are added to the base latexmk command.
2019-05-14 16:24:34 +01:00
Tim Alby
03047f45af update Git URL in Jenkinsfile 2019-05-07 18:31:54 +02:00
Timothée Alby
11cf8a98fa Update README.md 2019-05-07 16:41:17 +01:00
Christopher Hoskin
d2c2629ef5 Bump buildscripts from 1.1.11 to 1.1.20 2019-05-03 10:29:38 +01:00
Jakob Ackermann
adfeffd254 [docker] add support for a different docker group id on the docker host
Signed-off-by: Jakob Ackermann <das7pad@outlook.com>
2019-04-23 01:53:40 +02:00
Brian Gough
bd42fe5776 increase timeout for long-running acceptance tests 2019-04-01 09:42:54 +01:00
Christopher Hoskin
3200161308 Merge pull request #116 from sharelatex/csh-formalise-node-10.15
Formalise node 10.15 update
2019-03-28 11:59:08 +00:00
Christopher Hoskin
9cb14660d4 Formalise node 10.15 update 2019-03-26 11:50:59 +00:00
Henry Oswald
31153c479c change console.log for logger.log 2019-03-22 20:42:26 +00:00
Christopher Hoskin
f422bb8011 Merge pull request #113 from sharelatex/ho-osx-epoll
add epoll_pwait to seccomp profile
2019-03-04 14:57:01 +00:00
Christopher Hoskin
25c4c349d7 Merge pull request #115 from sharelatex/csh-issue-204-clsi-log-stackdriver
Bump logger to v1.6.0
2019-03-04 14:56:17 +00:00
Christopher Hoskin
e2377e1c1c Bump logger to v1.6.0 2019-03-04 12:05:28 +00:00
Brian Gough
1899d27732 increase acceptance test timeout to 1 minute 2019-02-22 13:58:12 +00:00
Brian Gough
9bf3795ceb Merge pull request #114 from sharelatex/bg-avoid-text-html-content-type-in-responses
use explicit json content-type to avoid security issues with text/html
2019-02-22 11:35:24 +00:00
Brian Gough
d20856f799 use explicit json content-type to avoid security issues with text/html 2019-02-12 16:54:59 +00:00
Henry Oswald
12fee9e4df add epoll_pwait to seccomp profile
Last year golang changed from epoll_wait to epoll_pwait https://github.com/golang/go/issues/23750

This causes golang panic errors on mac when running seccomp secure compiles using docker 18.09.1. It may start to become a problem on linux where we are running on 17.03.2-ce in production.
2019-01-24 12:30:37 +00:00
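For reference, allowing the syscall in a Docker seccomp profile of that era meant adding an entry like the following to the profile's syscall list (an illustrative snippet, not the actual profile):

    {
      "name": "epoll_pwait",
      "action": "SCMP_ACT_ALLOW",
      "args": []
    }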
Christopher Hoskin
ddaa944aa3 Merge pull request #112 from sharelatex/csh-issue-1309-node-10.15.0
Upgrade to Node 10 - CLSI
2019-01-17 09:50:19 +00:00
Christopher Hoskin
a194d7ad05 Fix broken spacing 2019-01-16 15:12:23 +00:00
Christopher Hoskin
4c8b619ee8 Switch to node 10 2019-01-16 15:11:49 +00:00
Christopher Hoskin
4bd67d5e7e Merge pull request #111 from sharelatex/csh-issue-1338-bulk-upgrade
Services bulk upgrade - CLSI
2019-01-15 12:28:35 +00:00
Christopher Hoskin
c269c308ef Correctly pass command with arguments to runuser 2019-01-15 11:29:04 +00:00
Christopher Hoskin
e12ffdd535 Pass arguments to node, not to runuser 2019-01-15 11:12:21 +00:00
Christopher Hoskin
82afad7afc Add **/*.map to .gitignore 2019-01-11 12:11:36 +00:00
Christopher Hoskin
2fceac6ac8 Remove grunt 2019-01-11 12:06:45 +00:00
Christopher Hoskin
d4e9aca9e2 Bump buildscript to 1.1.11 2019-01-11 11:52:10 +00:00
Christopher Hoskin
5d2eb129e8 Init metrics at top of app.coffee 2019-01-11 10:19:47 +00:00
Christopher Hoskin
b52a8b2aa2 Bump logger to v1.5.9 and settings to v1.1.0 2019-01-11 10:18:37 +00:00
Henry Oswald
6fbdcd76d0 Merge pull request #110 from sharelatex/ho-increase-compile-size
pull clsi compile size limit into setting and bump to 7mb
2019-01-08 13:30:00 +00:00
Henry Oswald
541dac11cb pull clsi compile size limit into setting and bump to 7mb 2019-01-08 12:56:16 +00:00
Christopher Hoskin
ee7947f54d Merge pull request #107 from sharelatex/csh-issue-1309-node-6.15
Bump node to 6.15
2018-12-18 11:16:25 +00:00
Christopher Hoskin
984474ee11 Add npm-shrinkwrap.json 2018-12-18 11:03:06 +00:00
Christopher Hoskin
be855805c9 package-lock not supported until npm 5 2018-12-17 15:31:45 +00:00
Christopher Hoskin
2d023a3b03 Bump node to 6.15.1 2018-12-17 15:29:56 +00:00
Christopher Hoskin
1894e8ad5d Merge pull request #106 from sharelatex/csh-prom-metrics
Use Prometheus metrics
2018-12-14 10:21:40 +00:00
Christopher Hoskin
9507f0f80f Revert "Bump buildscript to 1.1.10"
This reverts commit 38874f9169.
2018-12-13 17:37:16 +00:00
Christopher Hoskin
19078fe866 Revert "Initialise metrics at beginning of app"
This reverts commit 855f26c520.
2018-12-13 17:33:45 +00:00
Christopher Hoskin
38874f9169 Bump buildscript to 1.1.10 2018-12-13 14:45:40 +00:00
Christopher Hoskin
855f26c520 Initialise metrics at beginning of app 2018-12-13 14:24:44 +00:00
Christopher Hoskin
8401bbdc26 Bump metrics-sharelatex to v2.0.12 2018-12-13 14:21:32 +00:00
Christopher Hoskin
71181243b3 Bump metrics-sharelatex.git to v2.0.11 2018-12-13 14:15:19 +00:00
Christopher Hoskin
0b4ae6ef8d Use metrics which labels host in timing 2018-12-11 12:11:53 +00:00
Christopher Hoskin
747c73fdad Merge pull request #105 from sharelatex/csh-204
Bump metrics to 2.0.4
2018-12-03 15:12:16 +00:00
Christopher Hoskin
1c1610a0bc Bump metrics to 2.0.4 2018-12-03 15:10:39 +00:00
Christopher Hoskin
434e819d23 Merge pull request #104 from sharelatex/csh-stackdriver
Add Prometheus Metrics to CLSIs
2018-12-03 11:45:02 +00:00
Christopher Hoskin
f92e626647 Inject routes after app defined 2018-11-29 15:49:12 +00:00
Christopher Hoskin
6159aff001 Inject metrics 2018-11-29 14:30:00 +00:00
Christopher Hoskin
49d5ad711a Bump metrics to v2.0.3 - specify tag correctly this time 2018-11-29 10:24:25 +00:00
Christopher Hoskin
bcdac34a0b Use v1.9.0 of metrics to get Prometheus support 2018-11-29 10:10:48 +00:00
Christopher Hoskin
25cb54d1d7 Merge branch 'master' into csh-stackdriver 2018-11-29 10:06:48 +00:00
Henry Oswald
75e77a3991 Merge pull request #103 from sharelatex/ho-mute-sentry-errors
have failed compiles warn rather than be an error
2018-11-28 22:35:51 +09:00
Henry Oswald
49f3b7d54f have failed compiles warn rather than be an error 2018-11-23 15:10:35 +00:00
Christopher Hoskin
f1ab938bab Merge pull request #102 from sharelatex/csh-expand-abbr
Expand CLSI to Common LaTeX Service Interface on first use
2018-11-22 09:52:30 +00:00
Christopher Hoskin
a18d49562c Expand CLSI to Common LaTeX Service Interface on first use 2018-11-22 09:13:23 +00:00
Christopher Hoskin
d3039a52f3 First attempt to use my stackdriver branch 2018-11-07 08:29:34 +00:00
Christopher Hoskin
7e07b8b4a7 Merge pull request #101 from sharelatex/csh-documentation
Add some notes on the CLSIs
2018-10-23 14:43:06 +01:00
Christopher Hoskin
473efdae70 Merge branch 'csh-documentation' of github.com:sharelatex/clsi-sharelatex into csh-documentation 2018-10-22 17:55:47 +01:00
Christopher Hoskin
3aa160b0e7 Make README more generic 2018-10-22 17:52:38 +01:00
Christopher Hoskin
114e4f7043 Fix indenting 2018-10-22 16:03:50 +01:00
Christopher Hoskin
cd0a71caba Add some notes on the CLSIs 2018-10-22 16:01:17 +01:00
Brian Gough
96d6fb3404 Merge pull request #100 from sharelatex/bg-create-main-file-for-pstool
use TikzManager to create main file for pstool package
2018-10-15 11:05:23 +01:00
Brian Gough
1481b4fe50 fix exception when content undefined in TikzManager 2018-10-15 10:01:52 +01:00
Brian Gough
3aad472a83 improve log message 2018-10-12 10:49:54 +01:00
Brian Gough
49ddcee0c6 use TikzManager to create main file for pstool package 2018-10-10 16:13:20 +01:00
Brian Gough
6d1545a40e Merge pull request #99 from sharelatex/bg-cache-tikz-minted-and-markdown-outputs
extend caching for tikz, minted and markdown files
2018-10-08 09:22:20 +01:00
Brian Gough
9ce7bfa8ab extend caching for tikz, minted and markdown files 2018-10-04 16:56:48 +01:00
Henry Oswald
7c4c8a9e44 remove debugging get settings function 2018-09-14 10:26:40 +01:00
Brian Gough
90436933da Merge pull request #96 from sharelatex/bg-cache-eps-to-pdf-converted-files
cache pdf files generated by epstopdf
2018-09-11 13:31:26 +01:00
Henry Oswald
77abf19f6b Merge pull request #86 from sharelatex/ho-dockerise
Dockerised clsi
2018-09-11 12:36:11 +01:00
Henry Oswald
a781c7f600 change timeout test latex code 2018-09-11 11:34:25 +01:00
Henry Oswald
b07b7a84be fix unit tests 2018-09-11 10:21:37 +01:00
Henry Oswald
58b4de905c Merge branch 'master' into ho-dockerise 2018-09-11 10:02:24 +01:00
Henry Oswald
5f9fb85613 bump wordcount timeouts, taken from 82b996b145 2018-09-11 09:55:10 +01:00
Henry Oswald
d3bb863d0a improve synctex logging 2018-09-11 09:51:20 +01:00
Brian Gough
00ebc87230 cache pdf files generated by epstopdf 2018-09-11 09:44:22 +01:00
Henry Oswald
6299832a13 don't error on a bad synctex call 2018-08-23 11:32:50 +01:00
Henry Oswald
607bb74ffa reduce log level 2018-08-23 11:16:28 +01:00
Henry Oswald
b4107b7391 fse.ensureDir when running synctex and wordcount 2018-08-23 08:34:18 +01:00
Henry Oswald
5074442702 fix unit tests 2018-08-23 00:21:05 +01:00
Henry Oswald
05ddbd3a18 try changing bin to be owned by node 2018-08-23 00:10:06 +01:00
Henry Oswald
7b773474d9 improve error reporting 2018-08-23 00:00:43 +01:00
Henry Oswald
e4d28addf9 change sync to async for lockfile debugging 2018-08-22 22:17:02 +01:00
Henry Oswald
171ad0329d fix sql query checking last access time 2018-08-22 18:21:15 +01:00
Henry Oswald
834eeffda4 add time syscall to seccomp 2018-08-21 18:56:53 +01:00
Henry Oswald
0f179a7c7c add log on exited error code 2018-08-21 12:02:12 +01:00
Henry Oswald
1990f20dc0 improve error reporting 2018-08-20 10:12:32 +01:00
Henry Oswald
407c7c235b Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-08-19 11:46:11 +01:00
Henry Oswald
988f177f79 added loads of debugging 2018-08-19 11:38:27 +01:00
Christopher Hoskin
c6f49f04a9 Merge pull request #95 from sharelatex/csh-sentry
read sentry dsn from env var into config
2018-08-15 11:49:34 +01:00
Christopher Hoskin
a26d7093b4 Merge branch 'ho-dockerise' into csh-sentry 2018-08-15 09:44:02 +01:00
Henry Oswald
eec0529ef7 put FILESTORE_PARALLEL_FILE_DOWNLOADS and
FILESTORE_PARALLEL_SQL_QUERY_LIMIT into env vars
2018-08-14 15:17:56 +01:00
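A minimal sketch of reading those limits into settings; the variable names come from the commit above, while the fallback defaults are assumptions:

    const parallelFileDownloads =
      parseInt(process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS, 10) || 1
    const parallelSqlQueryLimit =
      parseInt(process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT, 10) || 1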
Christopher Hoskin
382f30f810 Revert "Put a guard on sentry dsn"
This reverts commit 95e052d059.
2018-08-13 17:36:53 +01:00
Christopher Hoskin
95e052d059 Put a guard on sentry dsn 2018-08-13 12:27:13 +01:00
Christopher Hoskin
9f79229835 Read sentry dsn from env 2018-08-03 15:33:53 +01:00
Henry Oswald
95b2e8caae comment out erroring log for moment 2018-08-01 14:32:17 +01:00
Henry Oswald
3890cdec37 null check host options 2018-08-01 14:10:22 +01:00
Henry Oswald
3e3468d9e9 reduce logging 2018-08-01 13:59:09 +01:00
Henry Oswald
9ef9a3b780 make Settings.parallelSqlQueryLimit a config setting 2018-07-31 14:38:24 +01:00
Henry Oswald
ee518c1755 fix expired projects command 2018-07-30 17:37:30 +01:00
Henry Oswald
3a9206f1e7 fix missing cb’s 2018-07-30 17:01:59 +01:00
Henry Oswald
d1ce49d6d7 add db queue file for global db query queues 2018-07-30 16:46:47 +01:00
Henry Oswald
627bed428e added a queue with 1 concurrency to db queries 2018-07-30 16:22:04 +01:00
Henry Oswald
92e1240635 added some debugging 2018-07-30 15:18:25 +01:00
Henry Oswald
94a52333f7 add sync=off and read_uncommitted=true to improve perf 2018-07-30 15:16:06 +01:00
Henry Oswald
c490479a1a remove some console.logs 2018-07-30 15:11:41 +01:00
Henry Oswald
f802717cb5 remove password from clsi for sql
sequelize fails when it is set to null
2018-07-30 14:04:33 +01:00
Henry Oswald
0eeee4284d bump retried and package versions 2018-07-30 11:25:28 +01:00
Henry Oswald
e1c23be845 Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-07-26 16:52:26 +01:00
Henry Oswald
67d34fdaf0 add WAL logging 2018-07-26 16:12:26 +01:00
Christopher Hoskin
465dc31e75 Push images to overleaf-ops 2018-07-18 11:32:41 +01:00
Henry Oswald
2b6032b249 only set wal for sqlite 2018-07-17 12:53:07 +01:00
Henry Oswald
3478c28fa3 Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-07-17 12:52:18 +01:00
Henry Oswald
3e26efe06f add PRAGMA journal_mode=WAL; 2018-07-17 12:50:33 +01:00
Christopher Hoskin
fb00098fc0 Bump build script to 1.1.8, drop csh-gcdm-test and csh-staging repos 2018-07-17 12:10:08 +01:00
Brian Gough
33092baf90 Merge branch 'master' of github.com:sharelatex/clsi-sharelatex 2018-07-17 10:41:14 +01:00
Brian Gough
4830e9f785 allow prune to fail to prevent build from terminating 2018-07-17 10:41:10 +01:00
Brian Gough
368f9b1c5d Merge pull request #91 from sharelatex/bg-increase-wordcount-timeout
increase timeout on wordcount
2018-07-17 10:10:36 +01:00
Henry Oswald
bcb87620b5 change override to leave image name so it works for wl_texlive 2018-07-16 17:25:14 +01:00
Henry Oswald
dd015a05cb remove express header 2018-07-16 15:38:23 +01:00
Henry Oswald
8d846f64a9 move texliveImageNameOveride further down request so it works for
compile tests
2018-07-13 11:52:49 +01:00
Henry Oswald
3545852173 quick hack to override image name further down stack 2018-07-13 11:46:37 +01:00
Henry Oswald
7fc9412141 Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-07-13 10:42:27 +01:00
Henry Oswald
a960614eb4 added texliveImageNameOveride 2018-07-13 10:37:22 +01:00
Christopher Hoskin
38bd598eb4 Merge pull request #94 from sharelatex/csh-remote-statsd
Depend on metrics v1.8.1 for remote StatsD host
2018-07-12 12:43:40 +01:00
Christopher Hoskin
97716365af Depend on metrics v1.8.1 for remote StatsD host 2018-07-12 11:22:02 +01:00
Christopher Hoskin
c1277e9f22 Use our experimental metrics 2018-07-06 15:08:38 +01:00
Henry Oswald
a75cec7d52 added maint down endpoint 2018-07-05 15:07:07 +01:00
Henry Oswald
6464aefdb4 added filestoreDomainOveride 2018-07-03 16:41:34 +01:00
Henry Oswald
ec85957ae4 add load balance http endpoints to shut box down 2018-06-28 16:04:34 +01:00
Henry Oswald
4bfc02ef3b fix seccomp key 2018-06-26 15:38:30 +01:00
Henry Oswald
364c8097c8 add error catch to settings.defaults 2018-06-26 15:04:56 +01:00
Henry Oswald
911e1d58f7 put seccomp_profile_path into variable and try catch 2018-06-26 14:44:03 +01:00
Henry Oswald
dd93d37460 added seccomp 2018-06-26 12:43:47 +01:00
Brian Gough
82b996b145 increase timeout on wordcount 2018-06-25 14:06:18 +01:00
Christopher Hoskin
b3033c1686 Add csh-staging to repos 2018-06-13 15:47:45 +01:00
Christopher Hoskin
547ef679b4 Merge pull request #89 from sharelatex/csh-issue-601
Csh issue 601
2018-06-13 15:45:17 +01:00
Henry Oswald
b30890ef99 remove the compile npm command, it isn't needed 2018-06-12 17:48:23 +01:00
Henry Oswald
926667f365 update build scripts so smoke tests are compiled 2018-06-12 17:44:13 +01:00
Christopher Hoskin
0a70985ba5 Specify repo correctly 2018-06-12 15:26:10 +01:00
Christopher Hoskin
4ca8027cb8 Increase acceptance test timeout. 2018-06-12 15:04:14 +01:00
Christopher Hoskin
da216c52e9 Accidentally left warning message commented out :( 2018-06-12 11:17:26 +01:00
Christopher Hoskin
e6532b5681 Update build scripts from 1.1.3 to 1.1.6 2018-06-12 10:22:30 +01:00
Christopher Hoskin
85aec72206 Use metadata to determine Google Cloud project dynamically. Fixes: #601 2018-06-12 10:15:17 +01:00
Henry Oswald
f000ecb681 Merge branch 'master' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-06-08 19:21:18 +01:00
Henry Oswald
436f69f3a6 Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-05-25 15:33:08 +01:00
Henry Oswald
38e91ab3e4 bumped timeout to 30 seconds 2018-05-25 15:30:26 +01:00
henry oswald
0b3af7d759 change synctex binary and added it to mounted volumes in docker config 2018-05-25 13:45:07 +00:00
henry oswald
9548615169 all but the sync tests should pass 2018-05-25 12:43:12 +00:00
Henry Oswald
da814b0e3a log settings on startup 2018-05-25 12:01:16 +01:00
Henry Oswald
e544ad9a23 set user to tex for tests run on ci box 2018-05-25 11:51:34 +01:00
Henry Oswald
1814f1c997 added --exit to unit tests 2018-05-24 21:59:02 +01:00
Henry Oswald
98a4e60eb7 update to 1.1.3 build scripts 2018-05-24 19:03:57 +01:00
Henry Oswald
ca23cd42ad update package.json scripts 2018-04-09 11:06:35 +01:00
Henry Oswald
b330ee2d5b grep works with command
updated build scripts
acceptance tests break, files are written as root when user is node
2018-03-29 17:07:22 +01:00
Henry Oswald
b5a7eabaab update build script and add load balancer agent 2018-03-29 12:12:29 +01:00
Henry Oswald
ec75f9fa67 add smoke test env var 2018-03-20 13:48:12 +00:00
Henry Oswald
dc1ea9d3e9 amend comment 2018-03-19 14:22:18 +00:00
Henry Oswald
4d955a8d41 try a build with node user 2018-03-19 14:10:45 +00:00
Henry Oswald
0915ac8c60 run as app user and chmod 777 compiles dir 2018-03-19 12:56:53 +00:00
Henry Oswald
aeb6f48945 try running as root 2018-03-19 09:51:26 +00:00
Henry Oswald
8ccbfc7d32 don't put synctex in as a volume 2018-03-16 18:11:46 +00:00
Henry Oswald
0bd9377018 chown synctex and add the creation of directories in 2018-03-16 17:48:55 +00:00
Henry Oswald
3c1d7ab264 mkdir the /app/bin/synctex-mount 2018-03-16 17:40:10 +00:00
Henry Oswald
3d9a93ad61 add logging of docker options 2018-03-16 17:37:36 +00:00
Henry Oswald
17c51c2ba0 added debugging and new moving commands 2018-03-16 17:30:11 +00:00
Henry Oswald
f4226ecd0e try copying synctex between directories 2018-03-16 17:10:56 +00:00
Henry Oswald
6fbfcfc68b move synctex into a directory for simple mounting 2018-03-16 16:50:30 +00:00
Henry Oswald
63145cc60c add synctex back in 2018-03-16 16:22:39 +00:00
Henry Oswald
5739a2aeca comment out synctex for moment 2018-03-16 16:04:26 +00:00
Henry Oswald
9f8a68be38 add log line for connecting to a db 2018-03-16 15:29:35 +00:00
Henry Oswald
1dce40c61f make compiles dir 2018-03-16 15:25:36 +00:00
Henry Oswald
52982b8fcd remove texlive docker images 2018-03-14 15:44:58 +00:00
Henry Oswald
a741a238a8 have entrypoint kick off download of texlive images
install script exits without error if auth fails.
2018-03-14 15:44:58 +00:00
Henry Oswald
0c1b699bd5 add docker ignore rather than make clean 2018-03-14 15:44:58 +00:00
Henry Oswald
dc3cb439d0 update build scripts 2018-03-14 15:44:58 +00:00
Henry Oswald
83c7068bd1 test new scripts on ci 2018-03-14 15:44:58 +00:00
Henry Oswald
b9d94fb428 fixed commented tests 2018-03-14 15:44:58 +00:00
Henry Oswald
7dbed15fea update scripts from latest build scripts 1.1.0 2018-03-14 15:44:58 +00:00
Henry Oswald
3c4870f688 remove touch /var/run/docker.sock which doesn’t work robustly 2018-03-14 15:44:58 +00:00
Henry Oswald
4ff1121353 add cmd back in 2018-03-14 15:44:58 +00:00
Henry Oswald
aca9100c52 set entry point for dockerfile 2018-03-14 15:44:58 +00:00
Henry Oswald
96a237fb74 removed user temporarily, created make ci task 2018-03-14 15:44:58 +00:00
Henry Oswald
4e6514b17e add logging in db.coffee 2018-03-14 15:44:58 +00:00
Henry Oswald
00cf5468d0 update jenkins task 2018-03-14 15:44:58 +00:00
Henry Oswald
177c46df98 add cache dir 2018-03-14 15:44:58 +00:00
Henry Oswald
2f96350b7c removed unused scripts 2018-03-14 15:44:58 +00:00
Henry Oswald
f1df41112b wip for ci 2018-03-14 15:44:58 +00:00
Henry Oswald
b202af3cf2 added docker runner into core codebase
supports both local command runner and docker runner

added docker files for tex live

also fixed tests so they exit correctly & removed debug lines
2018-03-14 15:44:49 +00:00
Henry Oswald
3bdd50a231 fix url fetcher tests so they exit correctly 2018-03-05 10:39:46 +00:00
Henry Oswald
3134b8aada add SYNCTEX_BIN_HOST_PATH for ci 2018-03-03 13:40:29 +00:00
Henry Oswald
aa0f9ee0be Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-03-03 13:37:00 +00:00
Henry Oswald
4dd11f3442 update docker compose ci to use extension file and dockerfile 2018-03-03 13:36:42 +00:00
Henry Oswald
ae7357778e Merge branch 'ho-dockerise' of github.com:sharelatex/clsi-sharelatex into ho-dockerise 2018-03-02 18:31:09 +00:00
Henry Oswald
c6b962a8b9 Merge branch 'master' into ho-dockerise 2018-03-02 18:18:18 +00:00
Henry Oswald
3de14a3f17 Merge branch 'master' into ho-dockerise 2018-03-02 18:16:16 +00:00
Henry Oswald
49a35c5e11 Merge branch 'master' into ho-dockerise 2018-03-02 18:12:32 +00:00
Henry Oswald
b9874b5ae5 built with 1.1.0 scripts 2018-03-02 18:08:13 +00:00
Henry Oswald
5cb3bfcbbb uncomment tests 2018-03-02 17:59:37 +00:00
Henry Oswald
1a47887e80 make timeout latex more complex(slower) 2018-03-02 17:58:34 +00:00
Henry Oswald
70f016af1f unit tests pass, acceptance fail
uncomment tests
2018-03-02 17:34:41 +00:00
Henry Oswald
b8c22f4d74 wip, docker container is correctly created 2018-03-02 17:14:23 +00:00
Henry Oswald
8f6db5baff tests pass under app user 2018-03-02 17:14:23 +00:00
Henry Oswald
d698cc318f updated build scripts 2018-03-02 17:14:23 +00:00
Henry Oswald
12b13d6199 mount app as volume in docker container for local tests
change to overrides
2018-03-02 17:14:23 +00:00
Henry Oswald
a02adacc98 updated build scripts with 1.0.3 2018-03-02 17:14:23 +00:00
Henry Oswald
a2a8b70b74 acceptance tests pass inside docker container (apart from sync) 2018-03-02 17:14:23 +00:00
Henry Oswald
017ba3a4ec mvp
needs hacked patch in docker runner

wip

most tests pass
2018-03-02 17:14:20 +00:00
James Allen
b64106b730 Provide hosts and siblings container as environment settings and add npm run start script
wip acceptance tests run, but don't all pass

wip

removed npm-debug from git
2018-03-02 17:14:18 +00:00
141 changed files with 20542 additions and 5096 deletions

.dockerignore (new file, 10 lines)

@@ -0,0 +1,10 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
cache/
compiles/
db/

.eslintrc (new file, 64 lines)

@@ -0,0 +1,64 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
  "extends": [
    "standard",
    "prettier",
    "prettier/standard"
  ],
  "parserOptions": {
    "ecmaVersion": 2017
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",
        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",
        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error"
      }
    }
  ]
}

.github/ISSUE_TEMPLATE.md (new file, 38 lines)

@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->
## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->
1.
2.
3.
## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->
## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->
## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->
## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->
* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:
## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->
## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->
-
-

.github/PULL_REQUEST_TEMPLATE.md (new file, 48 lines)

@@ -0,0 +1,48 @@
<!-- ** This is an Overleaf public repository ** -->
<!-- Please review https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md for guidance on what is expected of a contribution. -->
### Description
#### Screenshots
#### Related Issues / PRs
### Review
#### Potential Impact
#### Manual Testing Performed
- [ ]
- [ ]
#### Accessibility
### Deployment
#### Deployment Checklist
- [ ] Update documentation not included in the PR (if any)
- [ ]
#### Metrics and Monitoring
#### Who Needs to Know?

.gitignore (9 lines changed)

@@ -1,16 +1,13 @@
 **.swp
 node_modules
-app/js
-test/unit/js
-test/smoke/js
-test/acceptance/js
 test/acceptance/fixtures/tmp
 compiles
-app.js
 .DS_Store
 *~
 cache
 .vagrant
 db.sqlite
+db.sqlite-wal
+db.sqlite-shm
 config/*
-bin/synctex
+npm-debug.log

.nvmrc (2 lines changed)

@@ -1 +1 @@
-6.11.2
+10.21.0

.prettierrc (new file, 7 lines)

@@ -0,0 +1,7 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
  "semi": false,
  "singleQuote": true
}

.travis.yml (deleted)

@@ -1,15 +0,0 @@
language: node_js
before_install:
- npm install -g grunt-cli
install:
- npm install
- grunt install
script:
- grunt test:unit
services:
- redis-server
- mongodb

.viminfo (new file, 35 lines)

@@ -0,0 +1,35 @@
# This viminfo file was generated by Vim 7.4.
# You may edit it if you're careful!
# Value of 'encoding' when this file was written
*encoding=latin1
# hlsearch on (H) or off (h):
~h
# Command Line History (newest to oldest):
:x
# Search String History (newest to oldest):
# Expression History (newest to oldest):
# Input Line History (newest to oldest):
# Input Line History (newest to oldest):
# Registers:
# File marks:
'0 1 0 ~/hello
# Jumplist (newest first):
-' 1 0 ~/hello
# History of marks within files (newest to oldest):
> ~/hello
" 1 0
^ 1 1
. 1 0
+ 1 0

Dockerfile (new file, 30 lines)

@@ -0,0 +1,30 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
FROM node:10.21.0 as base
WORKDIR /app
COPY install_deps.sh /app
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
ENTRYPOINT ["/bin/sh", "entrypoint.sh"]
COPY entrypoint.sh /app
FROM base as app
#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/
RUN npm install --quiet
COPY . /app
FROM base
COPY --from=app /app /app
RUN mkdir -p cache compiles db \
&& chown node:node cache compiles db
CMD ["node", "--expose-gc", "app.js"]

Gruntfile.coffee (deleted)

@@ -1,104 +0,0 @@
spawn = require("child_process").spawn

module.exports = (grunt) ->
  grunt.initConfig
    coffee:
      app_src:
        expand: true,
        flatten: true,
        cwd: "app"
        src: ['coffee/*.coffee'],
        dest: 'app/js/',
        ext: '.js'
      app:
        src: "app.coffee"
        dest: "app.js"
      unit_tests:
        expand: true
        cwd: "test/unit/coffee"
        src: ["**/*.coffee"]
        dest: "test/unit/js/"
        ext: ".js"
      acceptance_tests:
        expand: true
        cwd: "test/acceptance/coffee"
        src: ["**/*.coffee"]
        dest: "test/acceptance/js/"
        ext: ".js"
      smoke_tests:
        expand: true
        cwd: "test/smoke/coffee"
        src: ["**/*.coffee"]
        dest: "test/smoke/js"
        ext: ".js"

    clean:
      app: ["app/js/"]
      unit_tests: ["test/unit/js"]
      acceptance_tests: ["test/acceptance/js"]
      smoke_tests: ["test/smoke/js"]

    execute:
      app:
        src: "app.js"

    mkdir:
      all:
        options:
          create: ["cache", "compiles"]

    mochaTest:
      unit:
        options:
          reporter: "spec"
          grep: grunt.option("grep")
        src: ["test/unit/js/**/*.js"]
      acceptance:
        options:
          reporter: "spec"
          timeout: 40000
          grep: grunt.option("grep")
        src: ["test/acceptance/js/**/*.js"]
      smoke:
        options:
          reported: "spec"
          timeout: 10000
        src: ["test/smoke/js/**/*.js"]

  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-clean'
  grunt.loadNpmTasks 'grunt-mocha-test'
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'
  grunt.loadNpmTasks 'grunt-mkdir'

  grunt.registerTask 'compile:bin', () ->
    callback = @async()
    proc = spawn "cc", [
      "-o", "bin/synctex", "-Isrc/synctex",
      "src/synctex.c", "src/synctex/synctex_parser.c", "src/synctex/synctex_parser_utils.c", "-lz"
    ], stdio: "inherit"
    proc.on "close", callback

  grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src', 'coffee:smoke_tests', 'compile:bin']
  grunt.registerTask 'run', ['compile:app', 'bunyan', 'execute']
  grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
  grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']
  grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
  grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']
  grunt.registerTask 'compile:smoke_tests', ['clean:smoke_tests', 'coffee:smoke_tests']
  grunt.registerTask 'test:smoke', ['compile:smoke_tests', 'mochaTest:smoke']
  grunt.registerTask 'install', 'compile:app'
  grunt.registerTask 'default', ['mkdir', 'run']

Jenkinsfile (124 lines changed)

@@ -1,79 +1,83 @@
+String cron_string = BRANCH_NAME == "master" ? "@daily" : ""
 pipeline {
   agent any
+  environment {
+    GIT_PROJECT = "clsi"
+    JENKINS_WORKFLOW = "clsi-sharelatex"
+    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
+    GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
+  }
   triggers {
     pollSCM('* * * * *')
-    cron('@daily')
+    cron(cron_string)
   }
   stages {
+    stage('Clean') {
+      steps {
+        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
+        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
+        sh 'rm -fr node_modules'
+      }
+    }
     stage('Install') {
-      agent {
-        docker {
-          image 'node:6.11.2'
-          args "-v /var/lib/jenkins/.npm:/tmp/.npm -e HOME=/tmp"
-          reuseNode true
-        }
-      }
       steps {
-        sh 'git config --global core.logallrefupdates false'
-        sh 'rm -fr node_modules'
-        checkout([$class: 'GitSCM', branches: [[name: '*/master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: '_docker-runner'], [$class: 'CloneOption', shallow: true]], userRemoteConfigs: [[credentialsId: 'GIT_DEPLOY_KEY', url: 'git@github.com:sharelatex/docker-runner-sharelatex']]])
-        sh 'npm install ./_docker-runner'
-        sh 'rm -fr ./_docker-runner ./_docker-runner@tmp'
-        sh 'npm install'
-        sh 'npm rebuild'
-        sh 'npm install --quiet grunt-cli'
+        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
+          sh "curl $GIT_API_URL \
+            --data '{ \
+            \"state\" : \"pending\", \
+            \"target_url\": \"$TARGET_URL\", \
+            \"description\": \"Your build is underway\", \
+            \"context\": \"ci/jenkins\" }' \
+            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
+        }
       }
     }
-    stage('Compile and Test') {
-      agent {
-        docker {
-          image 'node:6.11.2'
-          reuseNode true
-        }
-      }
-      steps {
-        sh 'node_modules/.bin/grunt compile:app'
-        sh 'node_modules/.bin/grunt compile:acceptance_tests'
-        sh 'NODE_ENV=development node_modules/.bin/grunt test:unit'
-      }
-    }
+    stage('Build') {
+      steps {
+        sh 'make build'
+      }
+    }
+    stage('Linting') {
+      steps {
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
+      }
+    }
+    stage('Unit Tests') {
+      steps {
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
+      }
+    }
     stage('Acceptance Tests') {
-      environment {
-        TEXLIVE_IMAGE="quay.io/sharelatex/texlive-full:2017.1"
-      }
       steps {
-        sh 'mkdir -p compiles cache'
-        // Not yet running, due to volumes/sibling containers
-        sh 'docker container prune -f'
-        sh 'docker pull $TEXLIVE_IMAGE'
-        sh 'docker pull sharelatex/acceptance-test-runner:clsi-6.11.2'
-        sh 'docker run --rm -e SIBLING_CONTAINER_USER=root -e SANDBOXED_COMPILES_HOST_DIR=$(pwd)/compiles -e SANDBOXED_COMPILES_SIBLING_CONTAINERS=true -e TEXLIVE_IMAGE=$TEXLIVE_IMAGE -v /var/run/docker.sock:/var/run/docker.sock -v $(pwd):/app sharelatex/acceptance-test-runner:clsi-6.11.2'
-        // This is a terrible hack to set the file ownership to jenkins:jenkins so we can cleanup the directory
-        sh 'docker run -v $(pwd):/app --rm busybox /bin/chown -R 111:119 /app'
-        sh 'rm -r compiles cache server.log db.sqlite config/settings.defaults.coffee'
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
       }
     }
-    stage('Package') {
+    stage('Package and docker push') {
       steps {
         sh 'echo ${BUILD_NUMBER} > build_number.txt'
         sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
-        sh 'tar -czf build.tar.gz --exclude=build.tar.gz --exclude-vcs .'
+        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'
+        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
+          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
+        }
+        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
+        sh 'docker logout https://gcr.io/overleaf-ops'
       }
     }
-    stage('Publish') {
+    stage('Publish to s3') {
       steps {
+        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
         withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
           s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
-        }
-        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
           // The deployment process uses this file to figure out the latest build
           s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
         }
@@ -82,11 +86,37 @@ pipeline {
   }
   post {
+    always {
+      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
+      sh 'make clean'
+    }
+    success {
+      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
+        sh "curl $GIT_API_URL \
+          --data '{ \
+          \"state\" : \"success\", \
+          \"target_url\": \"$TARGET_URL\", \
+          \"description\": \"Your build succeeded!\", \
+          \"context\": \"ci/jenkins\" }' \
+          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
+      }
+    }
     failure {
       mail(from: "${EMAIL_ALERT_FROM}",
            to: "${EMAIL_ALERT_TO}",
            subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
            body: "Build: ${BUILD_URL}")
+      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
+        sh "curl $GIT_API_URL \
+          --data '{ \
+          \"state\" : \"failure\", \
+          \"target_url\": \"$TARGET_URL\", \
+          \"description\": \"Your build failed\", \
+          \"context\": \"ci/jenkins\" }' \
+          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
+      }
     }
   }
 }

Makefile (new file, 88 lines)

@@ -0,0 +1,88 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = clsi
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')
DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
BRANCH_NAME=$(BRANCH_NAME) \
PROJECT_NAME=$(PROJECT_NAME) \
MOCHA_GREP=${MOCHA_GREP} \
docker-compose ${DOCKER_COMPOSE_FLAGS}
DOCKER_COMPOSE_TEST_ACCEPTANCE = \
COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
DOCKER_COMPOSE_TEST_UNIT = \
COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)
clean:
docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
format:
$(DOCKER_COMPOSE) run --rm test_unit npm run format
format_fix:
$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix
lint:
$(DOCKER_COMPOSE) run --rm test_unit npm run lint
test: format lint test_unit test_acceptance
test_unit:
ifneq (,$(wildcard test/unit))
$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
$(MAKE) test_unit_clean
endif
test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif
test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
$(MAKE) test_acceptance_clean
test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
$(MAKE) test_acceptance_clean
test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif
test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif
test_clean: test_acceptance_clean
test_acceptance_clean:
$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0
test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif
build:
docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
.
tar:
$(DOCKER_COMPOSE) up tar
publish:
docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
.PHONY: clean test test_unit test_acceptance test_clean build publish
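As a rough sketch of how the CI pipeline above drives these targets (the compose flags and registry mirror the Jenkinsfile; the initial `make build` step is assumed, as it is not shown in this diff):

    $ make build
    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test
    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar
    $ DOCKER_REPO=gcr.io/overleaf-ops make publish
    $ make clean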

README.md

@@ -1,16 +1,38 @@
overleaf/clsi
===============
A web api for compiling LaTeX documents in the cloud
The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
* TCP/3009 - the RESTful interface
* TCP/3048 - reports load information
* TCP/3049 - HTTP interface to control the CLSI service
These defaults can be modified in `config/settings.defaults.coffee`.
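For example, a compile can be requested by POSTing JSON to the RESTful port (a minimal sketch based on the routes and `RequestParser` shown later in this diff; the exact shape of a `resources` entry, in particular the `content` field, is an assumption):

    $ curl -X POST -H 'Content-Type: application/json' \
        http://localhost:3009/project/demo-project/compile \
        -d '{
          "compile": {
            "options": { "compiler": "pdflatex", "timeout": 60 },
            "rootResourcePath": "main.tex",
            "resources": [
              { "path": "main.tex", "content": "\\documentclass{article}\n\\begin{document}\nHello TeX.\n\\end{document}" }
            ]
          }
        }'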
The provided `Dockerfile` builds a docker image which has the docker command line tools installed. The configuration in `docker-compose-config.yml` mounts the docker socket so that the CLSI container can talk to the docker host it is running in. This allows it to spin up `sibling containers`, which run an image with a TeX distribution installed and perform the actual compiles.
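As a sketch of the idea (the `clsi` image tag and the `/app/compiles` mount point are illustrative):

    $ docker run --rm \
        -v /var/run/docker.sock:/var/run/docker.sock \
        -v "$(pwd)/compiles:/app/compiles" \
        clsi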
The CLSI can be configured through the following environment variables:
* `DOCKER_RUNNER` - Set to true to use sibling containers
* `SYNCTEX_BIN_HOST_PATH` - Path to SyncTeX binary
* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
* `SQLITE_PATH` - Path to SQLite database
* `TEXLIVE_IMAGE` - The TEXLIVE docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TEXLIVE image. Defaults to `tex`
* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the docker image, e.g. `gcr.io/overleaf-ops`
* `FILESTORE_DOMAIN_OVERRIDE` - The URL for the filestore service, e.g. `http://$FILESTORE_HOST:3009`
* `STATSD_HOST` - The address of the Statsd service (used by the metrics module)
* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
* `SMOKE_TEST` - Whether to run smoke tests
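Putting a few of these together, the service can be started in sibling-container mode roughly as follows (the TeX Live image is the example from above; `node app.js` matches the entry point added later in this diff):

    $ DOCKER_RUNNER=true \
      TEXLIVE_IMAGE=gcr.io/overleaf-ops/texlive-full:2017.1 \
      TEXLIVE_IMAGE_USER=tex \
      LISTEN_ADDRESS=0.0.0.0 \
      node app.js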
Installation
------------
The CLSI can be installed and set up as part of the entire [Overleaf stack](https://github.com/overleaf/overleaf) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first check out this repository:
$ git clone git@github.com:overleaf/clsi.git
Then install the required npm modules:
@@ -92,4 +114,4 @@ License
The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
Copyright (c) Overleaf, 2014-2019.

app.coffee (deleted)

@@ -1,169 +0,0 @@
CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
if Settings.sentry?.dsn?
logger.initializeErrorReporting(Settings.sentry.dsn)
smokeTest = require "smoke-test-sharelatex"
ContentTypeMapper = require "./app/js/ContentTypeMapper"
Errors = require './app/js/Errors'
Path = require "path"
fs = require "fs"
Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")
Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)
ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"
OutputCacheManager = require "./app/js/OutputCacheManager"
require("./app/js/db").sync()
express = require "express"
bodyParser = require "body-parser"
app = express()
app.use Metrics.http.monitor(logger)
# Compile requests can take longer than the default two
# minutes (including file download time), so bump up the
# timeout a bit.
TIMEOUT = 6 * 60 * 1000
app.use (req, res, next) ->
req.setTimeout TIMEOUT
res.setTimeout TIMEOUT
next()
app.param 'project_id', (req, res, next, project_id) ->
if project_id?.match /^[a-zA-Z0-9_-]+$/
next()
else
next new Error("invalid project id")
app.param 'user_id', (req, res, next, user_id) ->
if user_id?.match /^[0-9a-f]{24}$/
next()
else
next new Error("invalid user id")
app.param 'build_id', (req, res, next, build_id) ->
if build_id?.match OutputCacheManager.BUILD_REGEX
next()
else
next new Error("invalid build id #{build_id}")
app.post "/project/:project_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id", CompileController.clearCache
app.get "/project/:project_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/wordcount", CompileController.wordcount
app.get "/project/:project_id/status", CompileController.status
# Per-user containers
app.post "/project/:project_id/user/:user_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.post "/project/:project_id/user/:user_id/compile/stop", CompileController.stopCompile
app.delete "/project/:project_id/user/:user_id", CompileController.clearCache
app.get "/project/:project_id/user/:user_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/user/:user_id/sync/pdf", CompileController.syncFromPdf
app.get "/project/:project_id/user/:user_id/wordcount", CompileController.wordcount
ForbidSymlinks = require "./app/js/StaticServerForbidSymlinks"
# create a static server which does not allow access to any symlinks
# avoids possible mismatch of root directory between middleware check
# and serving the files
staticServer = ForbidSymlinks express.static, Settings.path.compilesDir, setHeaders: (res, path, stat) ->
if Path.basename(path) == "output.pdf"
# Calculate an etag in the same way as nginx
# https://github.com/tj/send/issues/65
etag = (path, stat) ->
'"' + Math.ceil(+stat.mtime / 1000).toString(16) +
'-' + Number(stat.size).toString(16) + '"'
res.set("Etag", etag(path, stat))
res.set("Content-Type", ContentTypeMapper.map(path))
app.get "/project/:project_id/user/:user_id/build/:build_id/output/*", (req, res, next) ->
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}-#{req.params.user_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
staticServer(req, res, next)
app.get "/project/:project_id/build/:build_id/output/*", (req, res, next) ->
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.params.build_id, "/#{req.params[0]}")
staticServer(req, res, next)
app.get "/project/:project_id/user/:user_id/output/*", (req, res, next) ->
# for specific user get the path to the top level file
req.url = "/#{req.params.project_id}-#{req.params.user_id}/#{req.params[0]}"
staticServer(req, res, next)
app.get "/project/:project_id/output/*", (req, res, next) ->
if req.query?.build? && req.query.build.match(OutputCacheManager.BUILD_REGEX)
# for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url = "/#{req.params.project_id}/" + OutputCacheManager.path(req.query.build, "/#{req.params[0]}")
else
req.url = "/#{req.params.project_id}/#{req.params[0]}"
staticServer(req, res, next)
app.get "/oops", (req, res, next) ->
logger.error {err: "hello"}, "test error"
res.send "error\n"
app.get "/status", (req, res, next) ->
res.send "CLSI is alive\n"
resCacher =
contentType:(@setContentType)->
send:(@code, @body)->
#default the server to be down
code:500
body:{}
setContentType:"application/json"
if Settings.smokeTest
do runSmokeTest = ->
logger.log("running smoke tests")
smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
setTimeout(runSmokeTest, 30 * 1000)
app.get "/health_check", (req, res)->
res.contentType(resCacher?.setContentType)
res.status(resCacher?.code).send(resCacher?.body)
profiler = require "v8-profiler"
app.get "/profile", (req, res) ->
time = parseInt(req.query.time || "1000")
profiler.startProfiling("test")
setTimeout () ->
profile = profiler.stopProfiling("test")
res.json(profile)
, time
app.get "/heapdump", (req, res)->
require('heapdump').writeSnapshot '/tmp/' + Date.now() + '.clsi.heapsnapshot', (err, filename)->
res.send filename
app.use (error, req, res, next) ->
if error instanceof Errors.NotFoundError
logger.warn {err: error, url: req.url}, "not found error"
return res.sendStatus(404)
else
logger.error {err: error, url: req.url}, "server error"
res.sendStatus(error?.statusCode || 500)
app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
logger.info "CLSI starting up, listening on #{host}:#{port}"
setInterval () ->
ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000

app.js (new file)

@@ -0,0 +1,351 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const tenMinutes = 10 * 60 * 1000
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')
const CompileController = require('./app/js/CompileController')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
logger.initialize('clsi')
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
logger.initializeErrorReporting(Settings.sentry.dsn)
}
const smokeTest = require('./test/smoke/js/SmokeTests')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')
const Path = require('path')
Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)
const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager')
const OutputCacheManager = require('./app/js/OutputCacheManager')
require('./app/js/db').sync()
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))
// Compile requests can take longer than the default two
// minutes (including file download time), so bump up the
// timeout a bit.
const TIMEOUT = 10 * 60 * 1000
app.use(function(req, res, next) {
req.setTimeout(TIMEOUT)
res.setTimeout(TIMEOUT)
res.removeHeader('X-Powered-By')
return next()
})
app.param('project_id', function(req, res, next, projectId) {
if (projectId != null ? projectId.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
return next()
} else {
return next(new Error('invalid project id'))
}
})
app.param('user_id', function(req, res, next, userId) {
if (userId != null ? userId.match(/^[0-9a-f]{24}$/) : undefined) {
return next()
} else {
return next(new Error('invalid user id'))
}
})
app.param('build_id', function(req, res, next, buildId) {
if (
buildId != null ? buildId.match(OutputCacheManager.BUILD_REGEX) : undefined
) {
return next()
} else {
return next(new Error(`invalid build id ${buildId}`))
}
})
app.post(
'/project/:project_id/compile',
bodyParser.json({ limit: Settings.compileSizeLimit }),
CompileController.compile
)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)
app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/project/:project_id/status', CompileController.status)
// Per-user containers
app.post(
'/project/:project_id/user/:user_id/compile',
bodyParser.json({ limit: Settings.compileSizeLimit }),
CompileController.compile
)
app.post(
'/project/:project_id/user/:user_id/compile/stop',
CompileController.stopCompile
)
app.delete('/project/:project_id/user/:user_id', CompileController.clearCache)
app.get(
'/project/:project_id/user/:user_id/sync/code',
CompileController.syncFromCode
)
app.get(
'/project/:project_id/user/:user_id/sync/pdf',
CompileController.syncFromPdf
)
app.get(
'/project/:project_id/user/:user_id/wordcount',
CompileController.wordcount
)
const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')
// create a static server which does not allow access to any symlinks
// avoids possible mismatch of root directory between middleware check
// and serving the files
const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
setHeaders(res, path, stat) {
if (Path.basename(path) === 'output.pdf') {
// Calculate an etag in the same way as nginx
// https://github.com/tj/send/issues/65
const etag = (path, stat) =>
`"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
'-' +
Number(stat.size).toString(16) +
'"'
res.set('Etag', etag(path, stat))
}
return res.set('Content-Type', ContentTypeMapper.map(path))
}
})
app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
req,
res,
next
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}-${req.params.user_id}/` +
OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
return staticServer(req, res, next)
})
app.get('/project/:project_id/build/:build_id/output/*', function(
req,
res,
next
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}/` +
OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
return staticServer(req, res, next)
})
app.get('/project/:project_id/user/:user_id/output/*', function(
req,
res,
next
) {
// for specific user get the path to the top level file
req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
return staticServer(req, res, next)
})
app.get('/project/:project_id/output/*', function(req, res, next) {
if (
(req.query != null ? req.query.build : undefined) != null &&
req.query.build.match(OutputCacheManager.BUILD_REGEX)
) {
// for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
req.url =
`/${req.params.project_id}/` +
OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
} else {
req.url = `/${req.params.project_id}/${req.params[0]}`
}
return staticServer(req, res, next)
})
app.get('/oops', function(req, res, next) {
logger.error({ err: 'hello' }, 'test error')
return res.send('error\n')
})
app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))
Settings.processTooOld = false
if (Settings.processLifespanLimitMs) {
Settings.processLifespanLimitMs +=
Settings.processLifespanLimitMs * (Math.random() / 10)
logger.info(
'Lifespan limited to ',
Date.now() + Settings.processLifespanLimitMs
)
setTimeout(() => {
logger.log('shutting down, process is too old')
Settings.processTooOld = true
}, Settings.processLifespanLimitMs)
}
function runSmokeTest() {
if (Settings.processTooOld) return
logger.log('running smoke tests')
smokeTest.triggerRun(err => {
if (err) logger.error({ err }, 'smoke tests failed')
setTimeout(runSmokeTest, 30 * 1000)
})
}
if (Settings.smokeTest) {
runSmokeTest()
}
app.get('/health_check', function(req, res) {
if (Settings.processTooOld) {
return res.status(500).json({ processTooOld: true })
}
smokeTest.sendLastResult(res)
})
app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res))
app.use(function(error, req, res, next) {
if (error instanceof Errors.NotFoundError) {
logger.log({ err: error, url: req.url }, 'not found error')
return res.sendStatus(404)
} else if (error.code === 'EPIPE') {
// inspect container returns EPIPE when shutting down
return res.sendStatus(503) // send 503 Unavailable response
} else {
logger.error({ err: error, url: req.url }, 'server error')
return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
}
})
const net = require('net')
const os = require('os')
let STATE = 'up'
const loadTcpServer = net.createServer(function(socket) {
socket.on('error', function(err) {
if (err.code === 'ECONNRESET') {
// this always comes up, we don't know why
return
}
logger.err({ err }, 'error with socket on load check')
return socket.destroy()
})
if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
let availableWorkingCpus
const currentLoad = os.loadavg()[0]
// staging CLSIs have only 1 CPU core
if (os.cpus().length === 1) {
availableWorkingCpus = 1
} else {
availableWorkingCpus = os.cpus().length - 1
}
const freeLoad = availableWorkingCpus - currentLoad
let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
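// e.g. on a 4-core box with loadavg 1.5: (3 - 1.5) / 3 rounds to 50% free (illustrative)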
if (freeLoadPercentage <= 0) {
freeLoadPercentage = 1 // when it's 0 the server is set to drain and will move projects to different servers
}
socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII')
return socket.end()
} else {
socket.write(`${STATE}\n`, 'ASCII')
return socket.end()
}
})
const loadHttpServer = express()
loadHttpServer.post('/state/up', function(req, res, next) {
STATE = 'up'
logger.info('getting message to set server to up')
return res.sendStatus(204)
})
loadHttpServer.post('/state/down', function(req, res, next) {
STATE = 'down'
logger.info('getting message to set server to down')
return res.sendStatus(204)
})
loadHttpServer.post('/state/maint', function(req, res, next) {
STATE = 'maint'
logger.info('getting message to set server to maint')
return res.sendStatus(204)
})
const port =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
x => x.port
) || 3013
const host =
__guard__(
Settings.internal != null ? Settings.internal.clsi : undefined,
x1 => x1.host
) || 'localhost'
const loadTcpPort = Settings.internal.load_balancer_agent.load_port
const loadHttpPort = Settings.internal.load_balancer_agent.local_port
if (!module.parent) {
// Called directly
app.listen(port, host, error => {
if (error) {
logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
} else {
logger.info(`CLSI starting up, listening on ${host}:${port}`)
}
})
loadTcpServer.listen(loadTcpPort, host, function(error) {
if (error != null) {
throw error
}
return logger.info(`Load tcp agent listening on load port ${loadTcpPort}`)
})
loadHttpServer.listen(loadHttpPort, host, function(error) {
if (error != null) {
throw error
}
return logger.info(`Load http agent listening on load port ${loadHttpPort}`)
})
}
module.exports = app
setInterval(() => {
ProjectPersistenceManager.refreshExpiryTimeout()
ProjectPersistenceManager.clearExpiredProjects()
}, tenMinutes)
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

CommandRunner.coffee (deleted)

@@ -1,44 +0,0 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
logger.info "using standard command runner"
module.exports = CommandRunner =
run: (project_id, command, directory, image, timeout, environment, callback = (error) ->) ->
command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
logger.log project_id: project_id, command: command, directory: directory, "running command"
logger.warn "timeouts and sandboxing are not enabled with CommandRunner"
# merge environment settings
env = {}
env[key] = value for key, value of process.env
env[key] = value for key, value of environment
# run command as detached process so it has its own process group (which can be killed if needed)
proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory, detached: true, env: env
proc.on "error", (err)->
logger.err err:err, project_id:project_id, command: command, directory: directory, "error running command"
callback(err)
proc.on "close", (code, signal) ->
logger.info code:code, signal:signal, project_id:project_id, "command exited"
if signal is 'SIGTERM' # signal from kill method below
err = new Error("terminated")
err.terminated = true
return callback(err)
else if code is 1 # exit status from chktex
err = new Error("exited")
err.code = code
return callback(err)
else
callback()
return proc.pid # return process id to allow job to be killed if necessary
kill: (pid, callback = (error) ->) ->
try
process.kill -pid # kill all processes in group
catch err
return callback(err)
callback()

CompileController.coffee (deleted)

@@ -1,115 +0,0 @@
RequestParser = require "./RequestParser"
CompileManager = require "./CompileManager"
Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"
Errors = require "./Errors"
module.exports = CompileController =
compile: (req, res, next = (error) ->) ->
timer = new Metrics.Timer("compile-request")
RequestParser.parse req.body, (error, request) ->
return next(error) if error?
request.project_id = req.params.project_id
request.user_id = req.params.user_id if req.params.user_id?
ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
return next(error) if error?
CompileManager.doCompileWithLock request, (error, outputFiles = []) ->
if error instanceof Errors.AlreadyCompilingError
code = 423 # Http 423 Locked
status = "compile-in-progress"
else if error instanceof Errors.FilesOutOfSyncError
code = 409 # Http 409 Conflict
status = "retry"
else if error?.terminated
status = "terminated"
else if error?.validate
status = "validation-#{error.validate}"
else if error?
if error.timedout
status = "timedout"
logger.log err: error, project_id: request.project_id, "timeout running compile"
else
status = "error"
code = 500
logger.error err: error, project_id: request.project_id, "error running compile"
else
status = "failure"
for file in outputFiles
if file.path?.match(/output\.pdf$/)
status = "success"
# log an error if any core files are found
for file in outputFiles
if file.path is "core"
logger.error project_id:request.project_id, req:req, outputFiles:outputFiles, "core file found in output"
timer.done()
res.status(code or 200).send {
compile:
status: status
error: error?.message or error
outputFiles: outputFiles.map (file) ->
url:
"#{Settings.apis.clsi.url}/project/#{request.project_id}" +
(if request.user_id? then "/user/#{request.user_id}" else "") +
(if file.build? then "/build/#{file.build}" else "") +
"/output/#{file.path}"
path: file.path
type: file.type
build: file.build
}
stopCompile: (req, res, next) ->
{project_id, user_id} = req.params
CompileManager.stopCompile project_id, user_id, (error) ->
return next(error) if error?
res.sendStatus(204)
clearCache: (req, res, next = (error) ->) ->
ProjectPersistenceManager.clearProject req.params.project_id, req.params.user_id, (error) ->
return next(error) if error?
res.sendStatus(204) # No content
syncFromCode: (req, res, next = (error) ->) ->
file = req.query.file
line = parseInt(req.query.line, 10)
column = parseInt(req.query.column, 10)
project_id = req.params.project_id
user_id = req.params.user_id
CompileManager.syncFromCode project_id, user_id, file, line, column, (error, pdfPositions) ->
return next(error) if error?
res.send JSON.stringify {
pdf: pdfPositions
}
syncFromPdf: (req, res, next = (error) ->) ->
page = parseInt(req.query.page, 10)
h = parseFloat(req.query.h)
v = parseFloat(req.query.v)
project_id = req.params.project_id
user_id = req.params.user_id
CompileManager.syncFromPdf project_id, user_id, page, h, v, (error, codePositions) ->
return next(error) if error?
res.send JSON.stringify {
code: codePositions
}
wordcount: (req, res, next = (error) ->) ->
file = req.query.file || "main.tex"
project_id = req.params.project_id
user_id = req.params.user_id
image = req.query.image
logger.log {image, file, project_id}, "word count request"
CompileManager.wordcount project_id, user_id, file, image, (error, result) ->
return next(error) if error?
res.send JSON.stringify {
texcount: result
}
status: (req, res, next = (error)-> )->
res.send("OK")

CompileManager.coffee (deleted)

@@ -1,332 +0,0 @@
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"
OutputCacheManager = require "./OutputCacheManager"
Settings = require("settings-sharelatex")
Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
child_process = require "child_process"
DraftModeManager = require "./DraftModeManager"
TikzManager = require "./TikzManager"
LockManager = require "./LockManager"
fs = require("fs")
fse = require "fs-extra"
os = require("os")
async = require "async"
Errors = require './Errors'
commandRunner = Settings.clsi?.commandRunner or "./CommandRunner"
logger.info commandRunner:commandRunner, "selecting command runner for clsi"
CommandRunner = require(commandRunner)
getCompileName = (project_id, user_id) ->
if user_id? then "#{project_id}-#{user_id}" else project_id
getCompileDir = (project_id, user_id) ->
Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
module.exports = CompileManager =
doCompileWithLock: (request, callback = (error, outputFiles) ->) ->
compileDir = getCompileDir(request.project_id, request.user_id)
lockFile = Path.join(compileDir, ".project-lock")
# use a .project-lock file in the compile directory to prevent
# simultaneous compiles
fse.ensureDir compileDir, (error) ->
return callback(error) if error?
LockManager.runWithLock lockFile, (releaseLock) ->
CompileManager.doCompile(request, releaseLock)
, callback
doCompile: (request, callback = (error, outputFiles) ->) ->
compileDir = getCompileDir(request.project_id, request.user_id)
timer = new Metrics.Timer("write-to-disk")
logger.log project_id: request.project_id, user_id: request.user_id, "syncing resources to disk"
ResourceWriter.syncResourcesToDisk request, compileDir, (error, resourceList) ->
# NOTE: resourceList is insecure, it should only be used to exclude files from the output list
if error? and error instanceof Errors.FilesOutOfSyncError
logger.warn project_id: request.project_id, user_id: request.user_id, "files out of sync, please retry"
return callback(error)
else if error?
logger.err err:error, project_id: request.project_id, user_id: request.user_id, "error writing resources to disk"
return callback(error)
logger.log project_id: request.project_id, user_id: request.user_id, time_taken: Date.now() - timer.start, "written files to disk"
timer.done()
injectDraftModeIfRequired = (callback) ->
if request.draft
DraftModeManager.injectDraftMode Path.join(compileDir, request.rootResourcePath), callback
else
callback()
createTikzFileIfRequired = (callback) ->
TikzManager.checkMainFile compileDir, request.rootResourcePath, resourceList, (error, usesTikzExternalize) ->
return callback(error) if error?
if usesTikzExternalize
TikzManager.injectOutputFile compileDir, request.rootResourcePath, callback
else
callback()
# set up environment variables for chktex
env = {}
# only run chktex on LaTeX files (not knitr .Rtex files or any others)
isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i)
if request.check? and isLaTeXFile
env['CHKTEX_OPTIONS'] = '-nall -e9 -e10 -w15 -w16'
env['CHKTEX_ULIMIT_OPTIONS'] = '-t 5 -v 64000'
if request.check is 'error'
env['CHKTEX_EXIT_ON_ERROR'] = 1
if request.check is 'validate'
env['CHKTEX_VALIDATE'] = 1
# apply a series of file modifications/creations for draft mode and tikz
async.series [injectDraftModeIfRequired, createTikzFileIfRequired], (error) ->
return callback(error) if error?
timer = new Metrics.Timer("run-compile")
# find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
tag = request.imageName?.match(/:(.*)/)?[1]?.replace(/\./g,'-') or "default"
tag = "other" if not request.project_id.match(/^[0-9a-f]{24}$/) # exclude smoke test
Metrics.inc("compiles")
Metrics.inc("compiles-with-image.#{tag}")
compileName = getCompileName(request.project_id, request.user_id)
LatexRunner.runLatex compileName, {
directory: compileDir
mainFile: request.rootResourcePath
compiler: request.compiler
timeout: request.timeout
image: request.imageName
environment: env
}, (error, output, stats, timings) ->
# request was for validation only
if request.check is "validate"
result = if error?.code then "fail" else "pass"
error = new Error("validation")
error.validate = result
# request was for compile, and failed on validation
if request.check is "error" and error?.message is 'exited'
error = new Error("compilation")
error.validate = "fail"
# compile was killed by user, was a validation, or a compile which failed validation
if error?.terminated or error?.validate
OutputFileFinder.findOutputFiles resourceList, compileDir, (err, outputFiles) ->
return callback(err) if err?
callback(error, outputFiles) # return output files so user can check logs
return
# compile completed normally
return callback(error) if error?
Metrics.inc("compiles-succeeded")
for metric_key, metric_value of stats or {}
Metrics.count(metric_key, metric_value)
for metric_key, metric_value of timings or {}
Metrics.timing(metric_key, metric_value)
loadavg = os.loadavg?()
Metrics.gauge("load-avg", loadavg[0]) if loadavg?
ts = timer.done()
logger.log {project_id: request.project_id, user_id: request.user_id, time_taken: ts, stats:stats, timings:timings, loadavg:loadavg}, "done compile"
if stats?["latex-runs"] > 0
Metrics.timing("run-compile-per-pass", ts / stats["latex-runs"])
if stats?["latex-runs"] > 0 and timings?["cpu-time"] > 0
Metrics.timing("run-compile-cpu-time-per-pass", timings["cpu-time"] / stats["latex-runs"])
OutputFileFinder.findOutputFiles resourceList, compileDir, (error, outputFiles) ->
return callback(error) if error?
OutputCacheManager.saveOutputFiles outputFiles, compileDir, (error, newOutputFiles) ->
callback null, newOutputFiles
stopCompile: (project_id, user_id, callback = (error) ->) ->
compileName = getCompileName(project_id, user_id)
LatexRunner.killLatex compileName, callback
clearProject: (project_id, user_id, _callback = (error) ->) ->
callback = (error) ->
_callback(error)
_callback = () ->
compileDir = getCompileDir(project_id, user_id)
CompileManager._checkDirectory compileDir, (err, exists) ->
return callback(err) if err?
return callback() if not exists # skip removal if no directory present
proc = child_process.spawn "rm", ["-r", compileDir]
proc.on "error", callback
stderr = ""
proc.stderr.on "data", (chunk) -> stderr += chunk.toString()
proc.on "close", (code) ->
if code == 0
return callback(null)
else
return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))
_findAllDirs: (callback = (error, allDirs) ->) ->
root = Settings.path.compilesDir
fs.readdir root, (err, files) ->
return callback(err) if err?
allDirs = (Path.join(root, file) for file in files)
callback(null, allDirs)
clearExpiredProjects: (max_cache_age_ms, callback = (error) ->) ->
now = Date.now()
# action for each directory
expireIfNeeded = (checkDir, cb) ->
fs.stat checkDir, (err, stats) ->
return cb() if err? # ignore errors checking directory
age = now - stats.mtime
hasExpired = (age > max_cache_age_ms)
if hasExpired then fse.remove(checkDir, cb) else cb()
# iterate over all project directories
CompileManager._findAllDirs (error, allDirs) ->
return callback() if error?
async.eachSeries allDirs, expireIfNeeded, callback
_checkDirectory: (compileDir, callback = (error, exists) ->) ->
fs.lstat compileDir, (err, stats) ->
if err?.code is 'ENOENT'
return callback(null, false) # directory does not exist
else if err?
logger.err {dir: compileDir, err:err}, "error on stat of project directory for removal"
return callback(err)
else if not stats?.isDirectory()
logger.err {dir: compileDir, stats:stats}, "bad project directory for removal"
return callback new Error("project directory is not directory")
else
callback(null, true) # directory exists
syncFromCode: (project_id, user_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
# If LaTeX was run in a virtual environment, the file path that synctex expects
# might not match the file path on the host. The .synctex.gz file however, will be accessed
# wherever it is on the host.
compileName = getCompileName(project_id, user_id)
base_dir = Settings.path.synctexBaseDir(compileName)
file_path = base_dir + "/" + file_name
compileDir = getCompileDir(project_id, user_id)
synctex_path = Path.join(compileDir, "output.pdf")
CompileManager._runSynctex ["code", synctex_path, file_path, line, column], (error, stdout) ->
return callback(error) if error?
logger.log project_id: project_id, user_id:user_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
callback null, CompileManager._parseSynctexFromCodeOutput(stdout)
syncFromPdf: (project_id, user_id, page, h, v, callback = (error, filePositions) ->) ->
compileName = getCompileName(project_id, user_id)
base_dir = Settings.path.synctexBaseDir(compileName)
compileDir = getCompileDir(project_id, user_id)
synctex_path = Path.join(compileDir, "output.pdf")
CompileManager._runSynctex ["pdf", synctex_path, page, h, v], (error, stdout) ->
return callback(error) if error?
logger.log project_id: project_id, user_id:user_id, page: page, h: h, v:v, stdout: stdout, "synctex pdf output"
callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
_checkFileExists: (path, callback = (error) ->) ->
synctexDir = Path.dirname(path)
synctexFile = Path.join(synctexDir, "output.synctex.gz")
fs.stat synctexDir, (error, stats) ->
if error?.code is 'ENOENT'
return callback(new Errors.NotFoundError("called synctex with no output directory"))
return callback(error) if error?
fs.stat synctexFile, (error, stats) ->
if error?.code is 'ENOENT'
return callback(new Errors.NotFoundError("called synctex with no output file"))
return callback(error) if error?
return callback(new Error("not a file")) if not stats?.isFile()
callback()
_runSynctex: (args, callback = (error, stdout) ->) ->
bin_path = Path.resolve(__dirname + "/../../bin/synctex")
seconds = 1000
outputFilePath = args[1]
CompileManager._checkFileExists outputFilePath, (error) ->
return callback(error) if error?
if Settings.clsi?.synctexCommandWrapper?
[bin_path, args] = Settings.clsi?.synctexCommandWrapper bin_path, args
child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->
if error?
logger.err err:error, args:args, "error running synctex"
return callback(error)
callback(null, stdout)
_parseSynctexFromCodeOutput: (output) ->
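# the synctex wrapper's stdout is tab-separated, e.g. "NODE\t1\t100.50\t200.25\t50.00\t10.00" (illustrative)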
results = []
for line in output.split("\n")
[node, page, h, v, width, height] = line.split("\t")
if node == "NODE"
results.push {
page: parseInt(page, 10)
h: parseFloat(h)
v: parseFloat(v)
height: parseFloat(height)
width: parseFloat(width)
}
return results
_parseSynctexFromPdfOutput: (output, base_dir) ->
results = []
for line in output.split("\n")
[node, file_path, line, column] = line.split("\t")
if node == "NODE"
file = file_path.slice(base_dir.length + 1)
results.push {
file: file
line: parseInt(line, 10)
column: parseInt(column, 10)
}
return results
wordcount: (project_id, user_id, file_name, image, callback = (error, pdfPositions) ->) ->
logger.log project_id:project_id, user_id:user_id, file_name:file_name, image:image, "running wordcount"
file_path = "$COMPILE_DIR/" + file_name
command = [ "texcount", '-nocol', '-inc', file_path, "-out=" + file_path + ".wc"]
directory = getCompileDir(project_id, user_id)
timeout = 10 * 1000
compileName = getCompileName(project_id, user_id)
CommandRunner.run compileName, command, directory, image, timeout, {}, (error) ->
return callback(error) if error?
fs.readFile directory + "/" + file_name + ".wc", "utf-8", (err, stdout) ->
if err?
logger.err err:err, command:command, directory:directory, project_id:project_id, user_id:user_id, "error reading word count output"
return callback(err)
results = CompileManager._parseWordcountFromOutput(stdout)
logger.log project_id:project_id, user_id:user_id, wordcount: results, "word count results"
callback null, results
_parseWordcountFromOutput: (output) ->
results = {
encode: ""
textWords: 0
headWords: 0
outside: 0
headers: 0
elements: 0
mathInline: 0
mathDisplay: 0
errors: 0
messages: ""
}
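# texcount reports lines such as "Encoding: ascii" and "Words in text: 123" (illustrative);
# each line is split on ":" and matched by substring below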
for line in output.split("\n")
[data, info] = line.split(":")
if data.indexOf("Encoding") > -1
results['encode'] = info.trim()
if data.indexOf("in text") > -1
results['textWords'] = parseInt(info, 10)
if data.indexOf("in head") > -1
results['headWords'] = parseInt(info, 10)
if data.indexOf("outside") > -1
results['outside'] = parseInt(info, 10)
if data.indexOf("of head") > -1
results['headers'] = parseInt(info, 10)
if data.indexOf("Number of floats/tables/figures") > -1
results['elements'] = parseInt(info, 10)
if data.indexOf("Number of math inlines") > -1
results['mathInline'] = parseInt(info, 10)
if data.indexOf("Number of math displayed") > -1
results['mathDisplay'] = parseInt(info, 10)
if data is "(errors" # errors reported as (errors:123)
results['errors'] = parseInt(info, 10)
if line.indexOf("!!! ") > -1 # errors logged as !!! message !!!
results['messages'] += line + "\n"
return results

ContentTypeMapper.coffee (deleted)

@@ -1,24 +0,0 @@
Path = require 'path'
# here we coerce html, css and js to text/plain,
# otherwise choose correct mime type based on file extension,
# falling back to octet-stream
module.exports = ContentTypeMapper =
map: (path) ->
switch Path.extname(path)
when '.txt', '.html', '.js', '.css', '.svg'
return 'text/plain'
when '.csv'
return 'text/csv'
when '.pdf'
return 'application/pdf'
when '.png'
return 'image/png'
when '.jpg', '.jpeg'
return 'image/jpeg'
when '.tiff'
return 'image/tiff'
when '.gif'
return 'image/gif'
else
return 'application/octet-stream'

DraftModeManager.coffee (deleted)

@@ -1,24 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"
module.exports = DraftModeManager =
injectDraftMode: (filename, callback = (error) ->) ->
fs.readFile filename, "utf8", (error, content) ->
return callback(error) if error?
# avoid adding draft mode more than once
if content?.indexOf("\\documentclass\[draft") >= 0
return callback()
modified_content = DraftModeManager._injectDraftOption content
logger.log {
content: content.slice(0,1024), # \documentclass is normally v near the top
modified_content: modified_content.slice(0,1024),
filename
}, "injected draft class"
fs.writeFile filename, modified_content, callback
_injectDraftOption: (content) ->
content
# With existing options (must be first, otherwise both are applied)
.replace(/\\documentclass\[/g, "\\documentclass[draft,")
# Without existing options
.replace(/\\documentclass\{/g, "\\documentclass[draft]{")
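# e.g. "\documentclass[a4paper]{article}" -> "\documentclass[draft,a4paper]{article}"
# and "\documentclass{article}" -> "\documentclass[draft]{article}"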

Errors.coffee (deleted)

@@ -1,25 +0,0 @@
NotFoundError = (message) ->
error = new Error(message)
error.name = "NotFoundError"
error.__proto__ = NotFoundError.prototype
return error
NotFoundError.prototype.__proto__ = Error.prototype
FilesOutOfSyncError = (message) ->
error = new Error(message)
error.name = "FilesOutOfSyncError"
error.__proto__ = FilesOutOfSyncError.prototype
return error
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
AlreadyCompilingError = (message) ->
error = new Error(message)
error.name = "AlreadyCompilingError"
error.__proto__ = AlreadyCompilingError.prototype
return error
AlreadyCompilingError.prototype.__proto__ = Error.prototype
module.exports = Errors =
NotFoundError: NotFoundError
FilesOutOfSyncError: FilesOutOfSyncError
AlreadyCompilingError: AlreadyCompilingError

LatexRunner.coffee (deleted)

@@ -1,94 +0,0 @@
Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")
ProcessTable = {} # table of currently running jobs (pids or docker container names)
module.exports = LatexRunner =
runLatex: (project_id, options, callback = (error) ->) ->
{directory, mainFile, compiler, timeout, image, environment} = options
compiler ||= "pdflatex"
timeout ||= 60000 # milliseconds
logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, environment: environment, "starting compile"
# We want to run latexmk on the tex file which we will automatically
# generate from the Rtex/Rmd/md file.
mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")
if compiler == "pdflatex"
command = LatexRunner._pdflatexCommand mainFile
else if compiler == "latex"
command = LatexRunner._latexCommand mainFile
else if compiler == "xelatex"
command = LatexRunner._xelatexCommand mainFile
else if compiler == "lualatex"
command = LatexRunner._lualatexCommand mainFile
else
return callback new Error("unknown compiler: #{compiler}")
if Settings.clsi?.strace
command = ["strace", "-o", "strace", "-ff"].concat(command)
id = "#{project_id}" # record running project under this id
ProcessTable[id] = CommandRunner.run project_id, command, directory, image, timeout, environment, (error, output) ->
delete ProcessTable[id]
return callback(error) if error?
runs = output?.stderr?.match(/^Run number \d+ of .*latex/mg)?.length or 0
failed = if output?.stdout?.match(/^Latexmk: Errors/m)? then 1 else 0
# counters from latexmk output
stats = {}
stats["latexmk-errors"] = failed
stats["latex-runs"] = runs
stats["latex-runs-with-errors"] = if failed then runs else 0
stats["latex-runs-#{runs}"] = 1
stats["latex-runs-with-errors-#{runs}"] = if failed then 1 else 0
# timing information from /usr/bin/time
timings = {}
stderr = output?.stderr
timings["cpu-percent"] = stderr?.match(/Percent of CPU this job got: (\d+)/m)?[1] or 0
timings["cpu-time"] = stderr?.match(/User time.*: (\d+.\d+)/m)?[1] or 0
timings["sys-time"] = stderr?.match(/System time.*: (\d+.\d+)/m)?[1] or 0
callback error, output, stats, timings
killLatex: (project_id, callback = (error) ->) ->
id = "#{project_id}"
logger.log {id:id}, "killing running compile"
if not ProcessTable[id]?
logger.warn {id}, "no such project to kill"
return callback(null)
else
CommandRunner.kill ProcessTable[id], callback
_latexmkBaseCommand: (Settings?.clsi?.latexmkCommandPrefix || []).concat([
"latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR",
"-synctex=1","-interaction=batchmode"
])
_pdflatexCommand: (mainFile) ->
LatexRunner._latexmkBaseCommand.concat [
"-pdf",
Path.join("$COMPILE_DIR", mainFile)
]
_latexCommand: (mainFile) ->
LatexRunner._latexmkBaseCommand.concat [
"-pdfdvi",
Path.join("$COMPILE_DIR", mainFile)
]
_xelatexCommand: (mainFile) ->
LatexRunner._latexmkBaseCommand.concat [
"-xelatex",
Path.join("$COMPILE_DIR", mainFile)
]
_lualatexCommand: (mainFile) ->
LatexRunner._latexmkBaseCommand.concat [
"-lualatex",
Path.join("$COMPILE_DIR", mainFile)
]

LockManager.coffee (deleted)

@@ -1,23 +0,0 @@
Settings = require('settings-sharelatex')
logger = require "logger-sharelatex"
Lockfile = require('lockfile') # from https://github.com/npm/lockfile
Errors = require "./Errors"
module.exports = LockManager =
LOCK_TEST_INTERVAL: 1000 # 1s between each test of the lock
MAX_LOCK_WAIT_TIME: 15000 # 15s maximum time to spend trying to get the lock
LOCK_STALE: 5*60*1000 # 5 mins time until lock auto expires
runWithLock: (path, runner = ((releaseLock = (error) ->) ->), callback = ((error) ->)) ->
lockOpts =
wait: @MAX_LOCK_WAIT_TIME
pollPeriod: @LOCK_TEST_INTERVAL
stale: @LOCK_STALE
Lockfile.lock path, lockOpts, (error) ->
return callback new Errors.AlreadyCompilingError("compile in progress") if error?.code is 'EEXIST'
return callback(error) if error?
runner (error1, args...) ->
Lockfile.unlock path, (error2) ->
error = error1 or error2
return callback(error) if error?
callback(null, args...)

Metrics.coffee (deleted)

@@ -1,2 +0,0 @@
module.exports = require "metrics-sharelatex"

OutputCacheManager.coffee (deleted)

@@ -1,199 +0,0 @@
async = require "async"
fs = require "fs"
fse = require "fs-extra"
Path = require "path"
logger = require "logger-sharelatex"
_ = require "underscore"
Settings = require "settings-sharelatex"
crypto = require "crypto"
OutputFileOptimiser = require "./OutputFileOptimiser"
module.exports = OutputCacheManager =
CACHE_SUBDIR: '.cache/clsi'
ARCHIVE_SUBDIR: '.archive/clsi'
# build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes
# for backwards compatibility, make the randombytes part optional
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/
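# e.g. "16ad0b2f3c1-9f2c4e6a8b0d1f23" (illustrative: Date.now() in hex, then 8 random bytes in hex)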
CACHE_LIMIT: 2 # maximum number of cache directories
CACHE_AGE: 60*60*1000 # up to one hour old
path: (buildId, file) ->
# used by static server, given build id return '.cache/clsi/buildId'
if buildId.match OutputCacheManager.BUILD_REGEX
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
else
# for invalid build id, return top level
return file
generateBuildId: (callback = (error, buildId) ->) ->
# generate a secure build id from Date.now() and 8 random bytes in hex
crypto.randomBytes 8, (err, buf) ->
return callback(err) if err?
random = buf.toString('hex')
date = Date.now().toString(16)
callback err, "#{date}-#{random}"
saveOutputFiles: (outputFiles, compileDir, callback = (error) ->) ->
OutputCacheManager.generateBuildId (err, buildId) ->
return callback(err) if err?
OutputCacheManager.saveOutputFilesInBuildDir outputFiles, compileDir, buildId, callback
saveOutputFilesInBuildDir: (outputFiles, compileDir, buildId, callback = (error) ->) ->
# make a compileDir/CACHE_SUBDIR/build_id directory and
# copy all the output files into it
cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
# Put the files into a new cache subdirectory
cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId)
# Is it a per-user compile? check if compile directory is PROJECTID-USERID
perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/)
# Archive logs in background
if Settings.clsi?.archive_logs or Settings.clsi?.strace
OutputCacheManager.archiveLogs outputFiles, compileDir, buildId, (err) ->
if err?
logger.warn err:err, "error archiving log files"
# make the new cache directory
fse.ensureDir cacheDir, (err) ->
if err?
logger.error err: err, directory: cacheDir, "error creating cache directory"
callback(err, outputFiles)
else
# copy all the output files into the new cache directory
results = []
async.mapSeries outputFiles, (file, cb) ->
# don't send dot files as output, express doesn't serve them
if OutputCacheManager._fileIsHidden(file.path)
logger.warn compileDir: compileDir, path: file.path, "ignoring dotfile in output"
return cb()
# copy other files into cache directory if valid
newFile = _.clone(file)
[src, dst] = [Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]
OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
return cb(err) if err?
if !isSafe
return cb()
OutputCacheManager._checkIfShouldCopy src, (err, shouldCopy) ->
return cb(err) if err?
if !shouldCopy
return cb()
OutputCacheManager._copyFile src, dst, (err) ->
return cb(err) if err?
newFile.build = buildId # attach a build id if we cached the file
results.push newFile
cb()
, (err) ->
if err?
# pass back the original files if we encountered *any* error
callback(err, outputFiles)
# clean up the directory we just created
fse.remove cacheDir, (err) ->
if err?
logger.error err: err, dir: cacheDir, "error removing cache dir after failure"
else
# pass back the list of new files in the cache
callback(err, results)
# let file expiry run in the background, expire all previous files if per-user
OutputCacheManager.expireOutputFiles cacheRoot, {keep: buildId, limit: if perUser then 1 else null}
archiveLogs: (outputFiles, compileDir, buildId, callback = (error) ->) ->
archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId)
logger.log {dir: archiveDir}, "archiving log files for project"
fse.ensureDir archiveDir, (err) ->
return callback(err) if err?
async.mapSeries outputFiles, (file, cb) ->
[src, dst] = [Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]
OutputCacheManager._checkFileIsSafe src, (err, isSafe) ->
return cb(err) if err?
return cb() if !isSafe
OutputCacheManager._checkIfShouldArchive src, (err, shouldArchive) ->
return cb(err) if err?
return cb() if !shouldArchive
OutputCacheManager._copyFile src, dst, cb
, callback
expireOutputFiles: (cacheRoot, options, callback = (error) ->) ->
# look in compileDir for build dirs and delete if > N or age of mod time > T
fs.readdir cacheRoot, (err, results) ->
if err?
return callback(null) if err.code == 'ENOENT' # cache directory is empty
logger.error err: err, project_id: cacheRoot, "error clearing cache"
return callback(err)
dirs = results.sort().reverse()
currentTime = Date.now()
isExpired = (dir, index) ->
return false if options?.keep == dir
# remove any directories over the requested (non-null) limit
return true if options?.limit? and index > options.limit
# remove any directories over the hard limit
return true if index > OutputCacheManager.CACHE_LIMIT
# we can get the build time from the first part of the directory name DDDD-RRRR
# DDDD is date and RRRR is random bytes
dirTime = parseInt(dir.split('-')?[0], 16)
age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
toRemove = _.filter(dirs, isExpired)
removeDir = (dir, cb) ->
fse.remove Path.join(cacheRoot, dir), (err, result) ->
logger.log cache: cacheRoot, dir: dir, "removed expired cache dir"
if err?
logger.error err: err, dir: dir, "cache remove error"
cb(err, result)
async.eachSeries toRemove, (dir, cb) ->
removeDir dir, cb
, callback
_fileIsHidden: (path) ->
return path?.match(/^\.|\/\./)?
_checkFileIsSafe: (src, callback = (error, isSafe) ->) ->
# check if we have a valid file to copy into the cache
fs.stat src, (err, stats) ->
if err?.code is 'ENOENT'
logger.warn err: err, file: src, "file has disappeared before copying to build cache"
callback(err, false)
else if err?
# some other problem reading the file
logger.error err: err, file: src, "stat error for file in cache"
callback(err, false)
else if not stats.isFile()
# other filetype - reject it
logger.warn src: src, stat: stats, "nonfile output - refusing to copy to cache"
callback(null, false)
else
# it's a plain file, ok to copy
callback(null, true)
_copyFile: (src, dst, callback) ->
# copy output file into the cache
fse.copy src, dst, (err) ->
if err?.code is 'ENOENT'
logger.warn err: err, file: src, "file has disappeared when copying to build cache"
callback(err, false)
else if err?
logger.error err: err, src: src, dst: dst, "copy error for file in cache"
callback(err)
else
if Settings.clsi?.optimiseInDocker
# don't run any optimisations on the pdf when they are done
# in the docker container
callback()
else
# call the optimiser for the file too
OutputFileOptimiser.optimiseFile src, dst, callback
_checkIfShouldCopy: (src, callback = (err, shouldCopy) ->) ->
return callback(null, !Path.basename(src).match(/^strace/))
_checkIfShouldArchive: (src, callback = (err, shouldCopy) ->) ->
if Path.basename(src).match(/^strace/)
return callback(null, true)
if Settings.clsi?.archive_logs and Path.basename(src) in ["output.log", "output.blg"]
return callback(null, true)
return callback(null, false)

OutputFileFinder.coffee (deleted)

@@ -1,52 +0,0 @@
async = require "async"
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
module.exports = OutputFileFinder =
findOutputFiles: (resources, directory, callback = (error, outputFiles, allFiles) ->) ->
incomingResources = {}
for resource in resources
incomingResources[resource.path] = true
logger.log directory: directory, "getting output files"
OutputFileFinder._getAllFiles directory, (error, allFiles = []) ->
if error?
logger.err err:error, "error finding all output files"
return callback(error)
outputFiles = []
for file in allFiles
if !incomingResources[file]
outputFiles.push {
path: file
type: file.match(/\.([^\.]+)$/)?[1]
}
callback null, outputFiles, allFiles
_getAllFiles: (directory, _callback = (error, fileList) ->) ->
callback = (error, fileList) ->
_callback(error, fileList)
_callback = () ->
# don't include clsi-specific files/directories in the output list
EXCLUDE_DIRS = ["-name", ".cache", "-o", "-name", ".archive","-o", "-name", ".project-*"]
args = [directory, "(", EXCLUDE_DIRS..., ")", "-prune", "-o", "-type", "f", "-print"]
logger.log args: args, "running find command"
proc = spawn("find", args)
stdout = ""
proc.stdout.on "data", (chunk) ->
stdout += chunk.toString()
proc.on "error", callback
proc.on "close", (code) ->
if code != 0
logger.warn {directory, code}, "find returned error, directory likely doesn't exist"
return callback null, []
fileList = stdout.trim().split("\n")
fileList = fileList.map (file) ->
# Strip leading directory
path = Path.relative(directory, file)
return callback null, fileList

OutputFileOptimiser.coffee (deleted)

@@ -1,55 +0,0 @@
fs = require "fs"
Path = require "path"
spawn = require("child_process").spawn
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
_ = require "underscore"
module.exports = OutputFileOptimiser =
optimiseFile: (src, dst, callback = (error) ->) ->
# check output file (src) and see if we can optimise it, storing
# the result in the build directory (dst)
if src.match(/\/output\.pdf$/)
OutputFileOptimiser.checkIfPDFIsOptimised src, (err, isOptimised) ->
return callback(null) if err? or isOptimised
OutputFileOptimiser.optimisePDF src, dst, callback
else
callback (null)
checkIfPDFIsOptimised: (file, callback) ->
SIZE = 16*1024 # check the header of the pdf
result = new Buffer(SIZE)
result.fill(0) # prevent leakage of uninitialised buffer
fs.open file, "r", (err, fd) ->
return callback(err) if err?
fs.read fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) ->
fs.close fd, (errClose) ->
return callback(errRead) if errRead?
return callback(errClose) if errClose?
isOptimised = buffer.toString('ascii').indexOf("/Linearized 1") >= 0
callback(null, isOptimised)
optimisePDF: (src, dst, callback = (error) ->) ->
tmpOutput = dst + '.opt'
args = ["--linearize", src, tmpOutput]
logger.log args: args, "running qpdf command"
timer = new Metrics.Timer("qpdf")
proc = spawn("qpdf", args)
stdout = ""
proc.stdout.on "data", (chunk) ->
stdout += chunk.toString()
callback = _.once(callback) # avoid double call back for error and close event
proc.on "error", (err) ->
logger.warn {err, args}, "qpdf failed"
callback(null) # ignore the error
proc.on "close", (code) ->
timer.done()
if code != 0
logger.warn {code, args}, "qpdf returned error"
return callback(null) # ignore the error
fs.rename tmpOutput, dst, (err) ->
if err?
logger.warn {tmpOutput, dst}, "failed to rename output of qpdf command"
callback(null) # ignore the error
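In shell terms, the optimisation step above amounts to the following (paths illustrative), after which the .opt file is renamed over the destination:

qpdf --linearize output.pdf output.pdf.opt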


@@ -1,66 +0,0 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
async = require "async"
logger = require "logger-sharelatex"
oneDay = 24 * 60 * 60 * 1000
Settings = require "settings-sharelatex"
module.exports = ProjectPersistenceManager =
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5
markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
db.Project.findOrCreate(where: {project_id: project_id})
.spread(
(project, created) ->
project.updateAttributes(lastAccessed: new Date())
.then(() -> callback())
.error callback
)
.error callback
clearExpiredProjects: (callback = (error) ->) ->
ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
return callback(error) if error?
logger.log project_ids: project_ids, "clearing expired projects"
jobs = for project_id in (project_ids or [])
do (project_id) ->
(callback) ->
ProjectPersistenceManager.clearProjectFromCache project_id, (err) ->
if err?
logger.error err: err, project_id: project_id, "error clearing project"
callback()
async.series jobs, (error) ->
return callback(error) if error?
CompileManager.clearExpiredProjects ProjectPersistenceManager.EXPIRY_TIMEOUT, (error) ->
callback() # ignore any errors from deleting directories
clearProject: (project_id, user_id, callback = (error) ->) ->
logger.log project_id: project_id, user_id:user_id, "clearing project for user"
CompileManager.clearProject project_id, user_id, (error) ->
return callback(error) if error?
ProjectPersistenceManager.clearProjectFromCache project_id, (error) ->
return callback(error) if error?
callback()
clearProjectFromCache: (project_id, callback = (error) ->) ->
logger.log project_id: project_id, "clearing project from cache"
UrlCache.clearProject project_id, (error) ->
return callback(error) if error?
ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
return callback(error) if error?
callback()
_clearProjectFromDatabase: (project_id, callback = (error) ->) ->
db.Project.destroy(where: {project_id: project_id})
.then(() -> callback())
.error callback
_findExpiredProjectIds: (callback = (error, project_ids) ->) ->
db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
.then((projects) ->
callback null, projects.map((project) -> project.project_id)
).error callback
logger.log {EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT}, "project assets kept timeout"
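A minimal sketch of the setting read above, assuming the usual settings-sharelatex module shape (the value is illustrative; the default used above is 2.5 days):

module.exports = {
  project_cache_length_ms: 24 * 60 * 60 * 1000 // expire cached project data after one day
}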


@@ -1,122 +0,0 @@
module.exports = RequestParser =
VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
MAX_TIMEOUT: 300
parse: (body, callback = (error, data) ->) ->
response = {}
if !body.compile?
return callback "top level object should have a compile attribute"
compile = body.compile
compile.options ||= {}
try
response.compiler = @_parseAttribute "compiler",
compile.options.compiler,
validValues: @VALID_COMPILERS
default: "pdflatex"
type: "string"
response.timeout = @_parseAttribute "timeout",
compile.options.timeout
default: RequestParser.MAX_TIMEOUT
type: "number"
response.imageName = @_parseAttribute "imageName",
compile.options.imageName,
type: "string"
response.draft = @_parseAttribute "draft",
compile.options.draft,
default: false,
type: "boolean"
response.check = @_parseAttribute "check",
compile.options.check,
type: "string"
# The syncType specifies whether the request contains all
# resources (full) or only those resources to be updated
# in-place (incremental).
response.syncType = @_parseAttribute "syncType",
compile.options.syncType,
validValues: ["full", "incremental"]
type: "string"
# The syncState is an identifier passed in with the request
# which has the property that it changes when any resource is
# added, deleted, moved or renamed.
#
# on syncType full the syncState identifier is passed in and
# stored
#
# on syncType incremental the syncState identifier must match
# the stored value
response.syncState = @_parseAttribute "syncState",
compile.options.syncState,
type: "string"
if response.timeout > RequestParser.MAX_TIMEOUT
response.timeout = RequestParser.MAX_TIMEOUT
response.timeout = response.timeout * 1000 # milliseconds
response.resources = (@_parseResource(resource) for resource in (compile.resources or []))
rootResourcePath = @_parseAttribute "rootResourcePath",
compile.rootResourcePath
default: "main.tex"
type: "string"
originalRootResourcePath = rootResourcePath
sanitizedRootResourcePath = RequestParser._sanitizePath(rootResourcePath)
response.rootResourcePath = RequestParser._checkPath(sanitizedRootResourcePath)
for resource in response.resources
if resource.path == originalRootResourcePath
resource.path = sanitizedRootResourcePath
catch error
return callback error
callback null, response
_parseResource: (resource) ->
if !resource.path? or typeof resource.path != "string"
throw "all resources should have a path attribute"
if resource.modified?
modified = new Date(resource.modified)
if isNaN(modified.getTime())
throw "resource modified date could not be understood: #{resource.modified}"
if !resource.url? and !resource.content?
throw "all resources should have either a url or content attribute"
if resource.content? and typeof resource.content != "string"
throw "content attribute should be a string"
if resource.url? and typeof resource.url != "string"
throw "url attribute should be a string"
return {
path: resource.path
modified: modified
url: resource.url
content: resource.content
}
_parseAttribute: (name, attribute, options) ->
if attribute?
if options.validValues?
if options.validValues.indexOf(attribute) == -1
throw "#{name} attribute should be one of: #{options.validValues.join(", ")}"
if options.type?
if typeof attribute != options.type
throw "#{name} attribute should be a #{options.type}"
else
return options.default if options.default?
return attribute
_sanitizePath: (path) ->
# See http://php.net/manual/en/function.escapeshellcmd.php
path.replace(/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g, "")
_checkPath: (path) ->
# check that the request does not use a relative path
for dir in path.split('/')
if dir == '..'
throw "relative path in root resource"
return path
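For illustration, a request body that parses cleanly under the rules above could look like this (all values hypothetical):

{
  "compile": {
    "options": {
      "compiler": "pdflatex",
      "timeout": 60,
      "syncType": "incremental",
      "syncState": "abc123"
    },
    "rootResourcePath": "main.tex",
    "resources": [
      { "path": "main.tex", "content": "\\documentclass{article}..." },
      { "path": "logo.png", "url": "http://example.com/logo.png", "modified": 1591000000000 }
    ]
  }
}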


@@ -1,72 +0,0 @@
Path = require "path"
fs = require "fs"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
Errors = require "./Errors"
SafeReader = require "./SafeReader"
module.exports = ResourceStateManager =
# The sync state is an identifier which must match for an
# incremental update to be allowed.
#
# The initial value is passed in and stored on a full
# compile, along with the list of resources.
#
# Subsequent incremental compiles must come with the same value - if
# not they will be rejected with a 409 Conflict response. The
# previous list of resources is returned.
#
# An incremental compile can only update existing files with new
# content. The sync state identifier must change if any docs or
# files are moved, added, deleted or renamed.
SYNC_STATE_FILE: ".project-sync-state"
SYNC_STATE_MAX_SIZE: 128*1024
saveProjectState: (state, resources, basePath, callback = (error) ->) ->
stateFile = Path.join(basePath, @SYNC_STATE_FILE)
if not state? # remove the file if no state passed in
logger.log state:state, basePath:basePath, "clearing sync state"
fs.unlink stateFile, (err) ->
if err? and err.code isnt 'ENOENT'
return callback(err)
else
return callback()
else
logger.log state:state, basePath:basePath, "writing sync state"
resourceList = (resource.path for resource in resources)
fs.writeFile stateFile, [resourceList..., "stateHash:#{state}"].join("\n"), callback
checkProjectStateMatches: (state, basePath, callback = (error, resources) ->) ->
stateFile = Path.join(basePath, @SYNC_STATE_FILE)
size = @SYNC_STATE_MAX_SIZE
SafeReader.readFile stateFile, size, 'utf8', (err, result, bytesRead) ->
return callback(err) if err?
if bytesRead is size
logger.error file:stateFile, size:size, bytesRead:bytesRead, "project state file truncated"
[resourceList..., oldState] = result?.toString()?.split("\n") or []
newState = "stateHash:#{state}"
logger.log state:state, oldState: oldState, basePath:basePath, stateMatches: (newState is oldState), "checking sync state"
if newState isnt oldState
return callback new Errors.FilesOutOfSyncError("invalid state for incremental update")
else
resources = ({path: path} for path in resourceList)
callback(null, resources)
checkResourceFiles: (resources, allFiles, basePath, callback = (error) ->) ->
# check the paths are all relative to current directory
for file in resources or []
for dir in file?.path?.split('/')
if dir == '..'
return callback new Error("relative path in resource file list")
# check if any of the input files are not present in list of files
seenFile = {}
for file in allFiles
seenFile[file] = true
missingFiles = (resource.path for resource in resources when not seenFile[resource.path])
if missingFiles?.length > 0
logger.err missingFiles:missingFiles, basePath:basePath, allFiles:allFiles, resources:resources, "missing input files for project"
return callback new Errors.FilesOutOfSyncError("resource files missing in incremental update")
else
callback()
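Putting saveProjectState and checkProjectStateMatches together, the .project-sync-state file is simply the list of resource paths followed by the state hash, e.g. (paths and hash hypothetical):

main.tex
chapters/introduction.tex
logo.png
stateHash:abc123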


@@ -1,130 +0,0 @@
UrlCache = require "./UrlCache"
Path = require "path"
fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
ResourceStateManager = require "./ResourceStateManager"
Metrics = require "./Metrics"
logger = require "logger-sharelatex"
settings = require("settings-sharelatex")
parallelFileDownloads = settings.parallelFileDownloads or 1
module.exports = ResourceWriter =
syncResourcesToDisk: (request, basePath, callback = (error, resourceList) ->) ->
if request.syncType is "incremental"
logger.log project_id: request.project_id, user_id: request.user_id, "incremental sync"
ResourceStateManager.checkProjectStateMatches request.syncState, basePath, (error, resourceList) ->
return callback(error) if error?
ResourceWriter._removeExtraneousFiles resourceList, basePath, (error, outputFiles, allFiles) ->
return callback(error) if error?
ResourceStateManager.checkResourceFiles resourceList, allFiles, basePath, (error) ->
return callback(error) if error?
ResourceWriter.saveIncrementalResourcesToDisk request.project_id, request.resources, basePath, (error) ->
return callback(error) if error?
callback(null, resourceList)
else
logger.log project_id: request.project_id, user_id: request.user_id, "full sync"
@saveAllResourcesToDisk request.project_id, request.resources, basePath, (error) ->
return callback(error) if error?
ResourceStateManager.saveProjectState request.syncState, request.resources, basePath, (error) ->
return callback(error) if error?
callback(null, request.resources)
saveIncrementalResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
@_createDirectory basePath, (error) =>
return callback(error) if error?
jobs = for resource in resources
do (resource) =>
(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
async.parallelLimit jobs, parallelFileDownloads, callback
saveAllResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
@_createDirectory basePath, (error) =>
return callback(error) if error?
@_removeExtraneousFiles resources, basePath, (error) =>
return callback(error) if error?
jobs = for resource in resources
do (resource) =>
(callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
async.parallelLimit jobs, parallelFileDownloads, callback
_createDirectory: (basePath, callback = (error) ->) ->
fs.mkdir basePath, (err) ->
if err?
if err.code is 'EEXIST'
return callback()
else
logger.log {err: err, dir:basePath}, "error creating directory"
return callback(err)
else
return callback()
_removeExtraneousFiles: (resources, basePath, _callback = (error, outputFiles, allFiles) ->) ->
timer = new Metrics.Timer("unlink-output-files")
callback = (error, result...) ->
timer.done()
_callback(error, result...)
OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles, allFiles) ->
return callback(error) if error?
jobs = []
for file in outputFiles or []
do (file) ->
path = file.path
should_delete = true
if path.match(/^output\./) or path.match(/\.aux$/) or path.match(/^cache\//) # knitr cache
should_delete = false
if path.match(/^output-.*/) # Tikz cached figures
should_delete = false
if path == "output.pdf" or path == "output.dvi" or path == "output.log" or path == "output.xdv"
should_delete = true
if path == "output.tex" # created by TikzManager if present in output files
should_delete = true
if should_delete
jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback
async.series jobs, (error) ->
return callback(error) if error?
callback(null, outputFiles, allFiles)
_deleteFileIfNotDirectory: (path, callback = (error) ->) ->
fs.stat path, (error, stat) ->
if error? and error.code is 'ENOENT'
return callback()
else if error?
logger.err {err: error, path: path}, "error stating file in deleteFileIfNotDirectory"
return callback(error)
else if stat.isFile()
fs.unlink path, (error) ->
if error?
logger.err {err: error, path: path}, "error removing file in deleteFileIfNotDirectory"
callback(error)
else
callback()
else
callback()
_writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
ResourceWriter.checkPath basePath, resource.path, (error, path) ->
return callback(error) if error?
mkdirp Path.dirname(path), (error) ->
return callback(error) if error?
# TODO: Don't overwrite file if it hasn't been modified
if resource.url?
UrlCache.downloadUrlToFile project_id, resource.url, path, resource.modified, (err)->
if err?
logger.err err:err, project_id:project_id, path:path, resource_url:resource.url, modified:resource.modified, "error downloading file for resources"
callback() #try and continue compiling even if http resource can not be downloaded at this time
else
fs.writeFile path, resource.content, callback
checkPath: (basePath, resourcePath, callback) ->
path = Path.normalize(Path.join(basePath, resourcePath))
if (path.slice(0, basePath.length + 1) != basePath + "/")
return callback new Error("resource path is outside root directory")
else
return callback(null, path)
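Tracing the rules in _removeExtraneousFiles above with some hypothetical paths: figure1.aux and cache/chunk-1 survive (aux files and the knitr cache), output-figure0.pdf survives (TikZ cached figures), while output.pdf, output.log and output.tex are deleted so the next compile regenerates them.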


@@ -1,25 +0,0 @@
fs = require "fs"
logger = require "logger-sharelatex"
module.exports = SafeReader =
# safely read up to size bytes from a file and return result as a
# string
readFile: (file, size, encoding, callback = (error, result) ->) ->
fs.open file, 'r', (err, fd) ->
return callback() if err? and err.code is 'ENOENT'
return callback(err) if err?
# safely return always closing the file
callbackWithClose = (err, result...) ->
fs.close fd, (err1) ->
return callback(err) if err?
return callback(err1) if err1?
callback(null, result...)
buff = new Buffer(size)
buff.fill(0) # prevent leakage of uninitialised buffer
fs.read fd, buff, 0, buff.length, 0, (err, bytesRead, buffer) ->
return callbackWithClose(err) if err?
result = buffer.toString(encoding, 0, bytesRead)
callbackWithClose(null, result, bytesRead)


@@ -1,41 +0,0 @@
Path = require("path")
fs = require("fs")
Settings = require("settings-sharelatex")
logger = require("logger-sharelatex")
url = require "url"
module.exports = ForbidSymlinks = (staticFn, root, options) ->
expressStatic = staticFn root, options
basePath = Path.resolve(root)
return (req, res, next) ->
path = url.parse(req.url)?.pathname
# check that the path is of the form /project_id_or_name/path/to/file.log
if result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/)
project_id = result[1]
file = result[2]
else
logger.warn path: path, "unrecognized file request"
return res.sendStatus(404)
# check that the file does not use a relative path
for dir in file.split('/')
if dir == '..'
logger.warn path: path, "attempt to use a relative path"
return res.sendStatus(404)
# check that the requested path is normalized
requestedFsPath = "#{basePath}/#{project_id}/#{file}"
if requestedFsPath != Path.normalize(requestedFsPath)
logger.error path: requestedFsPath, "requestedFsPath is not normalized"
return res.sendStatus(404)
# check that the requested path is not a symlink
fs.realpath requestedFsPath, (err, realFsPath)->
if err?
if err.code == 'ENOENT'
return res.sendStatus(404)
else
logger.error err:err, requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "error checking file access"
return res.sendStatus(500)
else if requestedFsPath != realFsPath
logger.warn requestedFsPath:requestedFsPath, realFsPath:realFsPath, path: req.params[0], project_id: req.params.project_id, "trying to access a different file (symlink), aborting"
return res.sendStatus(404)
else
expressStatic(req, res, next)
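A minimal usage sketch, assuming an Express app and the standard static middleware (the module path and options are illustrative):

const express = require('express')
const Settings = require('settings-sharelatex')
const ForbidSymlinks = require('./StaticServerForbidSymlinks') // path is an assumption
const app = express()
app.use(ForbidSymlinks(express.static, Settings.path.compilesDir, { maxAge: 0 }))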


@@ -1,35 +0,0 @@
fs = require "fs"
Path = require "path"
ResourceWriter = require "./ResourceWriter"
SafeReader = require "./SafeReader"
logger = require "logger-sharelatex"
# for \tikzexternalize to work the main file needs to match the
# jobname. Since we set the -jobname to output, we have to create a
# copy of the main file as 'output.tex'.
module.exports = TikzManager =
checkMainFile: (compileDir, mainFile, resources, callback = (error, usesTikzExternalize) ->) ->
# if there's already an output.tex file, we don't want to touch it
for resource in resources
if resource.path is "output.tex"
logger.log compileDir: compileDir, mainFile: mainFile, "output.tex already in resources"
return callback(null, false)
# if there's no output.tex, see if we are using tikz/pgf in the main file
ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
return callback(error) if error?
SafeReader.readFile path, 65536, "utf8", (error, content) ->
return callback(error) if error?
usesTikzExternalize = content?.indexOf("\\tikzexternalize") >= 0
logger.log compileDir: compileDir, mainFile: mainFile, usesTikzExternalize:usesTikzExternalize, "checked for tikzexternalize"
callback null, usesTikzExternalize
injectOutputFile: (compileDir, mainFile, callback = (error) ->) ->
ResourceWriter.checkPath compileDir, mainFile, (error, path) ->
return callback(error) if error?
fs.readFile path, "utf8", (error, content) ->
return callback(error) if error?
logger.log compileDir: compileDir, mainFile: mainFile, "copied file to output.tex for tikz"
# use wx flag to ensure that output file does not already exist
fs.writeFile Path.join(compileDir, "output.tex"), content, {flag:'wx'}, callback


@@ -1,125 +0,0 @@
db = require("./db")
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
fs = require("fs")
logger = require "logger-sharelatex"
async = require "async"
module.exports = UrlCache =
downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>
return callback(error) if error?
UrlCache._copyFile pathToCachedUrl, destPath, (error) ->
if error?
UrlCache._clearUrlDetails project_id, url, () ->
callback(error)
else
callback(error)
clearProject: (project_id, callback = (error) ->) ->
UrlCache._findAllUrlsInProject project_id, (error, urls) ->
logger.log project_id: project_id, url_count: urls.length, "clearing project URLs"
return callback(error) if error?
jobs = for url in (urls or [])
do (url) ->
(callback) ->
UrlCache._clearUrlFromCache project_id, url, (error) ->
if error?
logger.error err: error, project_id: project_id, url: url, "error clearing project URL"
callback()
async.series jobs, callback
_ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) ->
if lastModified?
# MySQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
# So round down to seconds
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) =>
return callback(error) if error?
if needsDownloading
logger.log url: url, lastModified: lastModified, "downloading URL"
UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) =>
return callback(error) if error?
UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) =>
return callback(error) if error?
callback null, UrlCache._cacheFilePathForUrl(project_id, url)
else
logger.log url: url, lastModified: lastModified, "URL is up to date in cache"
callback null, UrlCache._cacheFilePathForUrl(project_id, url)
_doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
if !lastModified?
return callback null, true
UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
return callback(error) if error?
if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
return callback null, true
else
return callback null, false
_cacheFileNameForUrl: (project_id, url) ->
project_id + ":" + crypto.createHash("md5").update(url).digest("hex")
_cacheFilePathForUrl: (project_id, url) ->
"#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}"
_copyFile: (from, to, _callback = (error) ->) ->
callbackOnce = (error) ->
if error?
logger.error err: error, from:from, to:to, "error copying file from cache"
_callback(error)
_callback = () ->
writeStream = fs.createWriteStream(to)
readStream = fs.createReadStream(from)
writeStream.on "error", callbackOnce
readStream.on "error", callbackOnce
writeStream.on "close", callbackOnce
writeStream.on "open", () ->
readStream.pipe(writeStream)
_clearUrlFromCache: (project_id, url, callback = (error) ->) ->
UrlCache._clearUrlDetails project_id, url, (error) ->
return callback(error) if error?
UrlCache._deleteUrlCacheFromDisk project_id, url, (error) ->
return callback(error) if error?
callback null
_deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), (error) ->
if error? and error.code != 'ENOENT' # no error if the file isn't present
return callback(error)
else
return callback()
_findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
db.UrlCache.find(where: { url: url, project_id: project_id })
.then((urlDetails) -> callback null, urlDetails)
.error callback
_updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
db.UrlCache.findOrCreate(where: {url: url, project_id: project_id})
.spread(
(urlDetails, created) ->
urlDetails.updateAttributes(lastModified: lastModified)
.then(() -> callback())
.error(callback)
)
.error callback
_clearUrlDetails: (project_id, url, callback = (error) ->) ->
db.UrlCache.destroy(where: {url: url, project_id: project_id})
.then(() -> callback null)
.error callback
_findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
db.UrlCache.findAll(where: { project_id: project_id })
.then(
(urlEntries) ->
callback null, urlEntries.map((entry) -> entry.url)
)
.error callback
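A minimal sketch of the flat-file naming scheme implemented by _cacheFileNameForUrl above (project id and URL hypothetical):

const crypto = require('crypto')
const project_id = '5eb1a001'
const url = 'http://example.com/logo.png'
const cacheFileName = project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
// stored as a single file under Settings.path.clsiCacheDir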


@@ -1,65 +0,0 @@
request = require("request").defaults(jar: false)
fs = require("fs")
logger = require "logger-sharelatex"
oneMinute = 60 * 1000
module.exports = UrlFetcher =
pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
callbackOnce = (error) ->
clearTimeout timeoutHandler if timeoutHandler?
_callback(error)
_callback = () ->
timeoutHandler = setTimeout () ->
timeoutHandler = null
logger.error url:url, filePath: filePath, "Timed out downloading file to cache"
callbackOnce(new Error("Timed out downloading file to cache #{url}"))
# FIXME: maybe need to close fileStream here
, 3 * oneMinute
logger.log url:url, filePath: filePath, "started downloading url to cache"
urlStream = request.get({url: url, timeout: oneMinute})
urlStream.pause() # stop data flowing until we are ready
# attach handlers before setting up pipes
urlStream.on "error", (error) ->
logger.error err: error, url:url, filePath: filePath, "error downloading url"
callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))
urlStream.on "end", () ->
logger.log url:url, filePath: filePath, "finished downloading file into cache"
urlStream.on "response", (res) ->
if res.statusCode >= 200 and res.statusCode < 300
fileStream = fs.createWriteStream(filePath)
# attach handlers before setting up pipes
fileStream.on 'error', (error) ->
logger.error err: error, url:url, filePath: filePath, "error writing file into cache"
fs.unlink filePath, (err) ->
if err?
logger.err err: err, filePath: filePath, "error deleting file from cache"
callbackOnce(error)
fileStream.on 'finish', () ->
logger.log url:url, filePath: filePath, "finished writing file into cache"
callbackOnce()
fileStream.on 'pipe', () ->
logger.log url:url, filePath: filePath, "piping into filestream"
urlStream.pipe(fileStream)
urlStream.resume() # now we are ready to handle the data
else
logger.error statusCode: res.statusCode, url:url, filePath: filePath, "unexpected status code downloading url to cache"
# https://nodejs.org/api/http.html#http_class_http_clientrequest
# If you add a 'response' event handler, then you must consume
# the data from the response object, either by calling
# response.read() whenever there is a 'readable' event, or by
# adding a 'data' handler, or by calling the .resume()
# method. Until the data is consumed, the 'end' event will not
# fire. Also, until the data is read it will consume memory
# that can eventually lead to a 'process out of memory' error.
urlStream.resume() # discard the data
callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}"))


@@ -1,36 +0,0 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")
_ = require("underscore")
options = _.extend {logging:false}, Settings.mysql.clsi
sequelize = new Sequelize(
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
)
module.exports =
UrlCache: sequelize.define("UrlCache", {
url: Sequelize.STRING
project_id: Sequelize.STRING
lastModified: Sequelize.DATE
}, {
indexes: [
{fields: ['url', 'project_id']},
{fields: ['project_id']}
]
})
Project: sequelize.define("Project", {
project_id: {type: Sequelize.STRING, primaryKey: true}
lastAccessed: Sequelize.DATE
}, {
indexes: [
{fields: ['lastAccessed']}
]
})
sync: () -> sequelize.sync()
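A minimal sketch of the Settings.mysql.clsi block consumed above (values illustrative; the dialect is an assumption, any Sequelize-supported one should work):

module.exports = {
  mysql: {
    clsi: {
      database: 'clsi',
      username: 'clsi',
      password: 'secret',
      dialect: 'sqlite',
      storage: '/tmp/clsi.sqlite' // only used by the sqlite dialect
    }
  }
}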

app/js/CommandRunner.js Normal file

@@ -0,0 +1,20 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
commandRunnerPath = './DockerRunner'
} else {
commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)
module.exports = CommandRunner
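The switch above is driven by a single boolean; a minimal sketch of the relevant settings fragment (assumed shape):

module.exports = {
  clsi: {
    dockerRunner: true // true selects './DockerRunner', anything else './LocalCommandRunner'
  }
}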

app/js/CompileController.js Normal file

@@ -0,0 +1,242 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')
module.exports = CompileController = {
compile(req, res, next) {
if (next == null) {
next = function(error) {}
}
const timer = new Metrics.Timer('compile-request')
return RequestParser.parse(req.body, function(error, request) {
if (error != null) {
return next(error)
}
request.project_id = req.params.project_id
if (req.params.user_id != null) {
request.user_id = req.params.user_id
}
return ProjectPersistenceManager.markProjectAsJustAccessed(
request.project_id,
function(error) {
if (error != null) {
return next(error)
}
return CompileManager.doCompileWithLock(request, function(
error,
outputFiles
) {
let code, status
if (outputFiles == null) {
outputFiles = []
}
if (error instanceof Errors.AlreadyCompilingError) {
code = 423 // Http 423 Locked
status = 'compile-in-progress'
} else if (error instanceof Errors.FilesOutOfSyncError) {
code = 409 // Http 409 Conflict
status = 'retry'
} else if (error && error.code === 'EPIPE') {
// docker returns EPIPE when shutting down
code = 503 // send 503 Unavailable response
status = 'unavailable'
} else if (error != null ? error.terminated : undefined) {
status = 'terminated'
} else if (error != null ? error.validate : undefined) {
status = `validation-${error.validate}`
} else if (error != null ? error.timedout : undefined) {
status = 'timedout'
logger.log(
{ err: error, project_id: request.project_id },
'timeout running compile'
)
} else if (error != null) {
status = 'error'
code = 500
logger.warn(
{ err: error, project_id: request.project_id },
'error running compile'
)
} else {
let file
status = 'failure'
for (file of Array.from(outputFiles)) {
if (
file.path != null
? file.path.match(/output\.pdf$/)
: undefined
) {
status = 'success'
}
}
if (status === 'failure') {
logger.warn(
{ project_id: request.project_id, outputFiles },
'project failed to compile successfully, no output.pdf generated'
)
}
// log an error if any core files are found
for (file of Array.from(outputFiles)) {
if (file.path === 'core') {
logger.error(
{ project_id: request.project_id, req, outputFiles },
'core file found in output'
)
}
}
}
if (error != null) {
outputFiles = error.outputFiles || []
}
timer.done()
return res.status(code || 200).send({
compile: {
status,
error: (error != null ? error.message : undefined) || error,
outputFiles: outputFiles.map(file => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null
? `/user/${request.user_id}`
: '') +
(file.build != null ? `/build/${file.build}` : '') +
`/output/${file.path}`,
path: file.path,
type: file.type,
build: file.build
}))
}
})
})
}
)
})
},
stopCompile(req, res, next) {
const { project_id, user_id } = req.params
return CompileManager.stopCompile(project_id, user_id, function(error) {
if (error != null) {
return next(error)
}
return res.sendStatus(204)
})
},
clearCache(req, res, next) {
if (next == null) {
next = function(error) {}
}
return ProjectPersistenceManager.clearProject(
req.params.project_id,
req.params.user_id,
function(error) {
if (error != null) {
return next(error)
}
return res.sendStatus(204)
}
)
}, // No content
syncFromCode(req, res, next) {
if (next == null) {
next = function(error) {}
}
const { file } = req.query
const line = parseInt(req.query.line, 10)
const column = parseInt(req.query.column, 10)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromCode(
project_id,
user_id,
file,
line,
column,
function(error, pdfPositions) {
if (error != null) {
return next(error)
}
return res.json({
pdf: pdfPositions
})
}
)
},
syncFromPdf(req, res, next) {
if (next == null) {
next = function(error) {}
}
const page = parseInt(req.query.page, 10)
const h = parseFloat(req.query.h)
const v = parseFloat(req.query.v)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
error,
codePositions
) {
if (error != null) {
return next(error)
}
return res.json({
code: codePositions
})
})
},
wordcount(req, res, next) {
if (next == null) {
next = function(error) {}
}
const file = req.query.file || 'main.tex'
const { project_id } = req.params
const { user_id } = req.params
const { image } = req.query
logger.log({ image, file, project_id }, 'word count request')
return CompileManager.wordcount(project_id, user_id, file, image, function(
error,
result
) {
if (error != null) {
return next(error)
}
return res.json({
texcount: result
})
})
},
status(req, res, next) {
if (next == null) {
next = function(error) {}
}
return res.send('OK')
}
}
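For illustration, a successful compile yields a response of the shape assembled above (ids hypothetical):

{
  "compile": {
    "status": "success",
    "error": null,
    "outputFiles": [
      {
        "url": "http://clsi.example.com/project/5eb1a001/build/1a2b3c/output/output.pdf",
        "path": "output.pdf",
        "type": "pdf",
        "build": "1a2b3c"
      }
    ]
  }
}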

app/js/CompileManager.js Normal file

@@ -0,0 +1,693 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-undef,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CompileManager
const ResourceWriter = require('./ResourceWriter')
const LatexRunner = require('./LatexRunner')
const OutputFileFinder = require('./OutputFileFinder')
const OutputCacheManager = require('./OutputCacheManager')
const Settings = require('settings-sharelatex')
const Path = require('path')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const child_process = require('child_process')
const DraftModeManager = require('./DraftModeManager')
const TikzManager = require('./TikzManager')
const LockManager = require('./LockManager')
const fs = require('fs')
const fse = require('fs-extra')
const os = require('os')
const async = require('async')
const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')
const getCompileName = function(project_id, user_id) {
if (user_id != null) {
return `${project_id}-${user_id}`
} else {
return project_id
}
}
const getCompileDir = (project_id, user_id) =>
Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
module.exports = CompileManager = {
doCompileWithLock(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
const lockFile = Path.join(compileDir, '.project-lock')
// use a .project-lock file in the compile directory to prevent
// simultaneous compiles
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
return callback(error)
}
return LockManager.runWithLock(
lockFile,
releaseLock => CompileManager.doCompile(request, releaseLock),
callback
)
})
},
doCompile(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
let timer = new Metrics.Timer('write-to-disk')
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'syncing resources to disk'
)
return ResourceWriter.syncResourcesToDisk(request, compileDir, function(
error,
resourceList
) {
// NOTE: resourceList is insecure; it should only be used to exclude files from the output list
if (error != null && error instanceof Errors.FilesOutOfSyncError) {
logger.warn(
{ project_id: request.project_id, user_id: request.user_id },
'files out of sync, please retry'
)
return callback(error)
} else if (error != null) {
logger.err(
{
err: error,
project_id: request.project_id,
user_id: request.user_id
},
'error writing resources to disk'
)
return callback(error)
}
logger.log(
{
project_id: request.project_id,
user_id: request.user_id,
time_taken: Date.now() - timer.start
},
'written files to disk'
)
timer.done()
const injectDraftModeIfRequired = function(callback) {
if (request.draft) {
return DraftModeManager.injectDraftMode(
Path.join(compileDir, request.rootResourcePath),
callback
)
} else {
return callback()
}
}
const createTikzFileIfRequired = callback =>
TikzManager.checkMainFile(
compileDir,
request.rootResourcePath,
resourceList,
function(error, needsMainFile) {
if (error != null) {
return callback(error)
}
if (needsMainFile) {
return TikzManager.injectOutputFile(
compileDir,
request.rootResourcePath,
callback
)
} else {
return callback()
}
}
)
// set up environment variables for chktex
const env = {}
if (Settings.texliveOpenoutAny && Settings.texliveOpenoutAny !== '') {
// override default texlive openout_any environment variable
env.openout_any = Settings.texliveOpenoutAny
}
// only run chktex on LaTeX files (not knitr .Rtex files or any others)
const isLaTeXFile =
request.rootResourcePath != null
? request.rootResourcePath.match(/\.tex$/i)
: undefined
if (request.check != null && isLaTeXFile) {
env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
if (request.check === 'error') {
env.CHKTEX_EXIT_ON_ERROR = 1
}
if (request.check === 'validate') {
env.CHKTEX_VALIDATE = 1
}
}
// apply a series of file modifications/creations for draft mode and tikz
return async.series(
[injectDraftModeIfRequired, createTikzFileIfRequired],
function(error) {
if (error != null) {
return callback(error)
}
timer = new Metrics.Timer('run-compile')
// find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
let tag =
__guard__(
__guard__(
request.imageName != null
? request.imageName.match(/:(.*)/)
: undefined,
x1 => x1[1]
),
x => x.replace(/\./g, '-')
) || 'default'
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
tag = 'other'
} // exclude smoke test
Metrics.inc('compiles')
Metrics.inc(`compiles-with-image.${tag}`)
const compileName = getCompileName(
request.project_id,
request.user_id
)
return LatexRunner.runLatex(
compileName,
{
directory: compileDir,
mainFile: request.rootResourcePath,
compiler: request.compiler,
timeout: request.timeout,
image: request.imageName,
flags: request.flags,
environment: env,
compileGroup: request.compileGroup
},
function(error, output, stats, timings) {
// request was for validation only
let metric_key, metric_value
if (request.check === 'validate') {
const result = (error != null
? error.code
: undefined)
? 'fail'
: 'pass'
error = new Error('validation')
error.validate = result
}
// request was for compile, and failed on validation
if (
request.check === 'error' &&
(error != null ? error.message : undefined) === 'exited'
) {
error = new Error('compilation')
error.validate = 'fail'
}
// compile was killed by user, was a validation, or a compile which failed validation
if (
(error != null ? error.terminated : undefined) ||
(error != null ? error.validate : undefined) ||
(error != null ? error.timedout : undefined)
) {
OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(err, outputFiles) {
if (err != null) {
return callback(err)
}
error.outputFiles = outputFiles // return output files so user can check logs
return callback(error)
}
)
return
}
// compile completed normally
if (error != null) {
return callback(error)
}
Metrics.inc('compiles-succeeded')
const object = stats || {}
for (metric_key in object) {
metric_value = object[metric_key]
Metrics.count(metric_key, metric_value)
}
const object1 = timings || {}
for (metric_key in object1) {
metric_value = object1[metric_key]
Metrics.timing(metric_key, metric_value)
}
const loadavg =
typeof os.loadavg === 'function' ? os.loadavg() : undefined
if (loadavg != null) {
Metrics.gauge('load-avg', loadavg[0])
}
const ts = timer.done()
logger.log(
{
project_id: request.project_id,
user_id: request.user_id,
time_taken: ts,
stats,
timings,
loadavg
},
'done compile'
)
if ((stats != null ? stats['latex-runs'] : undefined) > 0) {
Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
}
if (
(stats != null ? stats['latex-runs'] : undefined) > 0 &&
(timings != null ? timings['cpu-time'] : undefined) > 0
) {
Metrics.timing(
'run-compile-cpu-time-per-pass',
timings['cpu-time'] / stats['latex-runs']
)
}
return OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(error, outputFiles) {
if (error != null) {
return callback(error)
}
return OutputCacheManager.saveOutputFiles(
outputFiles,
compileDir,
(error, newOutputFiles) => callback(null, newOutputFiles)
)
}
)
}
)
}
)
})
},
stopCompile(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const compileName = getCompileName(project_id, user_id)
return LatexRunner.killLatex(compileName, callback)
},
clearProject(project_id, user_id, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callback = function(error) {
_callback(error)
return (_callback = function() {})
}
const compileDir = getCompileDir(project_id, user_id)
return CompileManager._checkDirectory(compileDir, function(err, exists) {
if (err != null) {
return callback(err)
}
if (!exists) {
return callback()
} // skip removal if no directory present
const proc = child_process.spawn('rm', ['-r', compileDir])
proc.on('error', callback)
let stderr = ''
proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))
return proc.on('close', function(code) {
if (code === 0) {
return callback(null)
} else {
return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
}
})
})
},
_findAllDirs(callback) {
if (callback == null) {
callback = function(error, allDirs) {}
}
const root = Settings.path.compilesDir
return fs.readdir(root, function(err, files) {
if (err != null) {
return callback(err)
}
const allDirs = Array.from(files).map(file => Path.join(root, file))
return callback(null, allDirs)
})
},
clearExpiredProjects(max_cache_age_ms, callback) {
if (callback == null) {
callback = function(error) {}
}
const now = Date.now()
// action for each directory
const expireIfNeeded = (checkDir, cb) =>
fs.stat(checkDir, function(err, stats) {
if (err != null) {
return cb()
} // ignore errors checking directory
const age = now - stats.mtime
const hasExpired = age > max_cache_age_ms
if (hasExpired) {
return fse.remove(checkDir, cb)
} else {
return cb()
}
})
// iterate over all project directories
return CompileManager._findAllDirs(function(error, allDirs) {
if (error != null) {
return callback()
}
return async.eachSeries(allDirs, expireIfNeeded, callback)
})
},
_checkDirectory(compileDir, callback) {
if (callback == null) {
callback = function(error, exists) {}
}
return fs.lstat(compileDir, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
return callback(null, false) // directory does not exist
} else if (err != null) {
logger.err(
{ dir: compileDir, err },
'error on stat of project directory for removal'
)
return callback(err)
} else if (!(stats != null ? stats.isDirectory() : undefined)) {
logger.err(
{ dir: compileDir, stats },
'bad project directory for removal'
)
return callback(new Error('project directory is not directory'))
} else {
return callback(null, true)
}
})
}, // directory exists
syncFromCode(project_id, user_id, file_name, line, column, callback) {
// If LaTeX was run in a virtual environment, the file path that synctex expects
// might not match the file path on the host. The .synctex.gz file, however, will be accessed
// wherever it is on the host.
if (callback == null) {
callback = function(error, pdfPositions) {}
}
const compileName = getCompileName(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
const file_path = base_dir + '/' + file_name
const compileDir = getCompileDir(project_id, user_id)
const synctex_path = `${base_dir}/output.pdf`
const command = ['code', synctex_path, file_path, line, column]
CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, user_id, file_name, line, column, command, stdout },
'synctex code output'
)
return callback(null, CompileManager._parseSynctexFromCodeOutput(stdout))
})
},
syncFromPdf(project_id, user_id, page, h, v, callback) {
if (callback == null) {
callback = function(error, filePositions) {}
}
const compileName = getCompileName(project_id, user_id)
const compileDir = getCompileDir(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
const synctex_path = `${base_dir}/output.pdf`
const command = ['pdf', synctex_path, page, h, v]
CompileManager._runSynctex(project_id, user_id, command, function(
error,
stdout
) {
if (error != null) {
return callback(error)
}
logger.log(
{ project_id, user_id, page, h, v, stdout },
'synctex pdf output'
)
return callback(
null,
CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
)
})
},
_checkFileExists(dir, filename, callback) {
if (callback == null) {
callback = function(error) {}
}
const file = Path.join(dir, filename)
return fs.stat(dir, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output directory'))
}
if (error != null) {
return callback(error)
}
return fs.stat(file, function(error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output file'))
}
if (error != null) {
return callback(error)
}
if (!(stats != null ? stats.isFile() : undefined)) {
return callback(new Error('not a file'))
}
return callback()
})
})
},
_runSynctex(project_id, user_id, command, callback) {
if (callback == null) {
callback = function(error, stdout) {}
}
const seconds = 1000
command.unshift('/opt/synctex')
const directory = getCompileDir(project_id, user_id)
const timeout = 60 * 1000 // increased to allow for large projects
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'synctex'
CompileManager._checkFileExists(directory, 'output.synctex.gz', error => {
if (error) {
return callback(error)
}
return CommandRunner.run(
compileName,
command,
directory,
Settings.clsi != null ? Settings.clsi.docker.image : undefined,
timeout,
{},
compileGroup,
function(error, output) {
if (error != null) {
logger.err(
{ err: error, command, project_id, user_id },
'error running synctex'
)
return callback(error)
}
return callback(null, output.stdout)
}
)
})
},
_parseSynctexFromCodeOutput(output) {
const results = []
for (const line of Array.from(output.split('\n'))) {
const [node, page, h, v, width, height] = Array.from(line.split('\t'))
if (node === 'NODE') {
results.push({
page: parseInt(page, 10),
h: parseFloat(h),
v: parseFloat(v),
height: parseFloat(height),
width: parseFloat(width)
})
}
}
return results
},
_parseSynctexFromPdfOutput(output, base_dir) {
const results = []
for (let line of Array.from(output.split('\n'))) {
let column, file_path, node
;[node, file_path, line, column] = Array.from(line.split('\t'))
if (node === 'NODE') {
const file = file_path.slice(base_dir.length + 1)
results.push({
file,
line: parseInt(line, 10),
column: parseInt(column, 10)
})
}
}
return results
},
wordcount(project_id, user_id, file_name, image, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
}
logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
const file_path = `$COMPILE_DIR/${file_name}`
const command = [
'texcount',
'-nocol',
'-inc',
file_path,
`-out=${file_path}.wc`
]
const compileDir = getCompileDir(project_id, user_id)
const timeout = 60 * 1000
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'wordcount'
return fse.ensureDir(compileDir, function(error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
'error ensuring dir for wordcount'
)
return callback(error)
}
return CommandRunner.run(
compileName,
command,
compileDir,
image,
timeout,
{},
compileGroup,
function(error) {
if (error != null) {
return callback(error)
}
return fs.readFile(
compileDir + '/' + file_name + '.wc',
'utf-8',
function(err, stdout) {
if (err != null) {
// use node_err so Sentry doesn't take the random path in the error as a unique id, which would make it impossible to ignore
logger.err(
{ node_err: err, command, compileDir, project_id, user_id },
'error reading word count output'
)
return callback(err)
}
const results = CompileManager._parseWordcountFromOutput(stdout)
logger.log(
{ project_id, user_id, wordcount: results },
'word count results'
)
return callback(null, results)
}
)
}
)
})
},
_parseWordcountFromOutput(output) {
const results = {
encode: '',
textWords: 0,
headWords: 0,
outside: 0,
headers: 0,
elements: 0,
mathInline: 0,
mathDisplay: 0,
errors: 0,
messages: ''
}
for (const line of Array.from(output.split('\n'))) {
const [data, info] = Array.from(line.split(':'))
if (data.indexOf('Encoding') > -1) {
results.encode = info.trim()
}
if (data.indexOf('in text') > -1) {
results.textWords = parseInt(info, 10)
}
if (data.indexOf('in head') > -1) {
results.headWords = parseInt(info, 10)
}
if (data.indexOf('outside') > -1) {
results.outside = parseInt(info, 10)
}
if (data.indexOf('of head') > -1) {
results.headers = parseInt(info, 10)
}
if (data.indexOf('Number of floats/tables/figures') > -1) {
results.elements = parseInt(info, 10)
}
if (data.indexOf('Number of math inlines') > -1) {
results.mathInline = parseInt(info, 10)
}
if (data.indexOf('Number of math displayed') > -1) {
results.mathDisplay = parseInt(info, 10)
}
if (data === '(errors') {
// errors reported as (errors:123)
results.errors = parseInt(info, 10)
}
if (line.indexOf('!!! ') > -1) {
// errors logged as !!! message !!!
results.messages += line + '\n'
}
}
return results
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
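For illustration (values hypothetical): a synctex 'pdf' output line of the form NODE<tab>/compile/main.tex<tab>42<tab>3 is parsed by _parseSynctexFromPdfOutput into { file: 'main.tex', line: 42, column: 3 } once the synctex base directory ('/compile' here) is stripped from the path.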

app/js/ContentTypeMapper.js Normal file

@@ -0,0 +1,38 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')
// here we coerce html, css and js to text/plain,
// otherwise choose correct mime type based on file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
map(path) {
switch (Path.extname(path)) {
case '.txt':
case '.html':
case '.js':
case '.css':
case '.svg':
return 'text/plain'
case '.csv':
return 'text/csv'
case '.pdf':
return 'application/pdf'
case '.png':
return 'image/png'
case '.jpg':
case '.jpeg':
return 'image/jpeg'
case '.tiff':
return 'image/tiff'
case '.gif':
return 'image/gif'
default:
return 'application/octet-stream'
}
}
}

app/js/DbQueue.js Normal file

@@ -0,0 +1,18 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const queue = async.queue(
(task, cb) => task(cb),
Settings.parallelSqlQueryLimit
)
queue.drain = () => logger.debug('all items have been processed')
module.exports = { queue }
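A usage sketch (someDbOperation is hypothetical): tasks are functions that receive a completion callback, and async runs them with the configured parallelism:

const DbQueue = require('./DbQueue')
DbQueue.queue.push(cb => someDbOperation(cb), err => {
  // called when this task finishes; err is whatever the task passed to cb
})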

app/js/DockerLockManager.js Normal file

@@ -0,0 +1,113 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
const logger = require('logger-sharelatex')
const LockState = {} // locks for docker container operations, by container name
module.exports = LockManager = {
MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
LOCK_TEST_INTERVAL: 1000, // retry time
tryLock(key, callback) {
let lockValue
if (callback == null) {
callback = function(err, gotLock) {}
}
const existingLock = LockState[key]
if (existingLock != null) {
// the lock is already taken, check how old it is
const lockAge = Date.now() - existingLock.created
if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
return callback(null, false) // we didn't get the lock, bail out
} else {
logger.error(
{ key, lock: existingLock, age: lockAge },
'taking old lock by force'
)
}
}
// take the lock
LockState[key] = lockValue = { created: Date.now() }
return callback(null, true, lockValue)
},
getLock(key, callback) {
let attempt
if (callback == null) {
callback = function(error, lockValue) {}
}
const startTime = Date.now()
return (attempt = () =>
LockManager.tryLock(key, function(error, gotLock, lockValue) {
if (error != null) {
return callback(error)
}
if (gotLock) {
return callback(null, lockValue)
} else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
const e = new Error('Lock timeout')
e.key = key
return callback(e)
} else {
return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
}
}))()
},
releaseLock(key, lockValue, callback) {
if (callback == null) {
callback = function(error) {}
}
const existingLock = LockState[key]
if (existingLock === lockValue) {
// lockValue is an object, so we can test by reference
delete LockState[key] // our lock, so we can free it
return callback()
} else if (existingLock != null) {
// lock exists but doesn't match ours
logger.error(
{ key, lock: existingLock },
'tried to release lock taken by force'
)
return callback()
} else {
logger.error(
{ key, lock: existingLock },
'tried to release lock that has gone'
)
return callback()
}
},
runWithLock(key, runner, callback) {
if (callback == null) {
callback = function(error) {}
}
return LockManager.getLock(key, function(error, lockValue) {
if (error != null) {
return callback(error)
}
return runner((error1, ...args) =>
LockManager.releaseLock(key, lockValue, function(error2) {
error = error1 || error2
if (error != null) {
return callback(error)
}
return callback(null, ...Array.from(args))
})
)
})
}
}
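A usage sketch (the key and work function are illustrative): the runner receives a callback that both reports its result and releases the lock:

LockManager.runWithLock('project-abc123', done => {
  doContainerWork(done) // hypothetical work that must not run concurrently for this container
}, (error, result) => {
  // error is either a lock timeout or whatever the runner passed to done
})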

app/js/DockerRunner.js Normal file

@@ -0,0 +1,732 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DockerRunner, oneHour
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Docker = require('dockerode')
const dockerode = new Docker()
const crypto = require('crypto')
const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('fs')
const Path = require('path')
const _ = require('lodash')
logger.info('using docker runner')
const usingSiblingContainers = () =>
__guard__(
Settings != null ? Settings.path : undefined,
x => x.sandboxedCompilesHostDir
) != null
let containerMonitorTimeout
let containerMonitorInterval
module.exports = DockerRunner = {
ERR_NOT_DIRECTORY: new Error('not a directory'),
ERR_TERMINATED: new Error('terminated'),
ERR_EXITED: new Error('exited'),
ERR_TIMED_OUT: new Error('container timed out'),
run(
project_id,
command,
directory,
image,
timeout,
environment,
compileGroup,
callback
) {
let name
if (callback == null) {
callback = function(error, output) {}
}
if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir
logger.log(
{ path: _newPath },
'altering bind path for sibling containers'
)
// Server Pro, example:
// '/var/lib/sharelatex/data/compiles/<project-id>'
// ... becomes ...
// '/opt/sharelatex_data/data/compiles/<project-id>'
directory = Path.join(
Settings.path.sandboxedCompilesHostDir,
Path.basename(directory)
)
}
const volumes = {}
volumes[directory] = '/compile'
command = Array.from(command).map(arg =>
__guardMethod__(arg.toString(), 'replace', o =>
o.replace('$COMPILE_DIR', '/compile')
)
)
if (image == null) {
;({ image } = Settings.clsi.docker)
}
if (Settings.texliveImageNameOveride != null) {
const img = image.split('/')
image = `${Settings.texliveImageNameOveride}/${img[2]}`
}
const options = DockerRunner._getContainerOptions(
command,
image,
volumes,
timeout,
environment,
compileGroup
)
const fingerprint = DockerRunner._fingerprintContainer(options)
options.name = name = `project-${project_id}-${fingerprint}`
// logOptions = _.clone(options)
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
logger.log({ project_id }, 'running docker container')
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
error,
output
) {
if (error && error.statusCode === 500) {
logger.log(
{ err: error, project_id },
'error running container so destroying and retrying'
)
return DockerRunner.destroyContainer(name, null, true, function(error) {
if (error != null) {
return callback(error)
}
return DockerRunner._runAndWaitForContainer(
options,
volumes,
timeout,
callback
)
})
} else {
return callback(error, output)
}
})
return name
}, // pass back the container name to allow it to be killed
kill(container_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id }, 'sending kill signal to container')
const container = dockerode.getContainer(container_id)
return container.kill(function(error) {
if (
error != null &&
__guardMethod__(error != null ? error.message : undefined, 'match', o =>
o.match(/Cannot kill container .* is not running/)
)
) {
logger.warn(
{ err: error, container_id },
'container not running, continuing'
)
error = null
}
if (error != null) {
logger.error({ err: error, container_id }, 'error killing container')
return callback(error)
} else {
return callback()
}
})
},
_runAndWaitForContainer(options, volumes, timeout, _callback) {
if (_callback == null) {
_callback = function(error, output) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const { name } = options
let streamEnded = false
let containerReturned = false
let output = {}
const callbackIfFinished = function() {
if (streamEnded && containerReturned) {
return callback(null, output)
}
}
const attachStreamHandler = function(error, _output) {
if (error != null) {
return callback(error)
}
output = _output
streamEnded = true
return callbackIfFinished()
}
return DockerRunner.startContainer(
options,
volumes,
attachStreamHandler,
function(error, containerId) {
if (error != null) {
return callback(error)
}
return DockerRunner.waitForContainer(name, timeout, function(
error,
exitCode
) {
let err
if (error != null) {
return callback(error)
}
if (exitCode === 137) {
// exit status from kill -9
err = DockerRunner.ERR_TERMINATED
err.terminated = true
return callback(err)
}
if (exitCode === 1) {
// exit status from chktex
err = DockerRunner.ERR_EXITED
err.code = exitCode
return callback(err)
}
containerReturned = true
__guard__(
options != null ? options.HostConfig : undefined,
x => (x.SecurityOpt = null)
) // clear SecurityOpt so the logged options stay small
logger.log({ err, exitCode, options }, 'docker container has exited')
return callbackIfFinished()
})
}
)
},
_getContainerOptions(
command,
image,
volumes,
timeout,
environment,
compileGroup
) {
let m, year
let key, value, hostVol, dockerVol
const timeoutInSeconds = timeout / 1000
const dockerVolumes = {}
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
dockerVolumes[dockerVol] = {}
if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
volumes[hostVol] = `${dockerVol}:rw`
}
}
// merge settings and environment parameter
const env = {}
for (const src of [Settings.clsi.docker.env, environment || {}]) {
for (key in src) {
value = src[key]
env[key] = value
}
}
// set the path based on the image year
if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
year = m[1]
} else {
year = '2014'
}
env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
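// e.g. (illustrative): image 'texlive:2019.1' matches year '2019', giving
// PATH .../usr/local/texlive/2019/bin/x86_64-linux/; an image without a
// ':YYYY.N' tag falls back to 2014.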
const options = {
Cmd: command,
Image: image,
Volumes: dockerVolumes,
WorkingDir: '/compile',
NetworkDisabled: true,
Memory: 1024 * 1024 * 1024, // 1 GB
User: Settings.clsi.docker.user,
Env: (() => {
const result = []
for (key in env) {
value = env[key]
result.push(`${key}=${value}`)
}
return result
})(), // convert the environment hash to an array
HostConfig: {
Binds: (() => {
const result1 = []
for (hostVol in volumes) {
dockerVol = volumes[hostVol]
result1.push(`${hostVol}:${dockerVol}`)
}
return result1
})(),
LogConfig: { Type: 'none', Config: {} },
Ulimits: [
{
Name: 'cpu',
Soft: timeoutInSeconds + 5,
Hard: timeoutInSeconds + 10
}
],
CapDrop: 'ALL',
SecurityOpt: ['no-new-privileges']
}
}
if (
(Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
null
) {
options.HostConfig.Binds.push(
`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
)
}
if (Settings.clsi.docker.seccomp_profile != null) {
options.HostConfig.SecurityOpt.push(
`seccomp=${Settings.clsi.docker.seccomp_profile}`
)
}
if (Settings.clsi.docker.runtime) {
options.HostConfig.Runtime = Settings.clsi.docker.runtime
}
if (Settings.clsi.docker.Readonly) {
options.HostConfig.ReadonlyRootfs = true
options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
}
// Allow per-compile group overriding of individual settings
if (
Settings.clsi.docker.compileGroupConfig &&
Settings.clsi.docker.compileGroupConfig[compileGroup]
) {
const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
let key
for (key in override) {
_.set(options, key, override[key])
}
}
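// Illustrative settings sketch (hypothetical keys and values): because
// _.set is used, override keys may be dotted paths into the options
// object, e.g.
//
//   compileGroupConfig: {
//     priority: { 'HostConfig.CpuShares': 1024 },
//     wordcount: { 'HostConfig.AutoRemove': true }
//   }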
return options
},
_fingerprintContainer(containerOptions) {
// Yay, Hashing!
const json = JSON.stringify(containerOptions)
return crypto
.createHash('md5')
.update(json)
.digest('hex')
},
startContainer(options, volumes, attachStreamHandler, callback) {
return LockManager.runWithLock(
options.name,
releaseLock =>
// Check that volumes exist before starting the container.
// When a container is started with volume pointing to a
// non-existent directory then docker creates the directory but
// with root ownership.
DockerRunner._checkVolumes(options, volumes, function(err) {
if (err != null) {
return releaseLock(err)
}
return DockerRunner._startContainer(
options,
volumes,
attachStreamHandler,
releaseLock
)
}),
callback
)
},
// Check that volumes exist and are directories
_checkVolumes(options, volumes, callback) {
if (callback == null) {
callback = function(error, containerName) {}
}
if (usingSiblingContainers()) {
// Server Pro, with sibling-containers active, skip checks
return callback(null)
}
const checkVolume = (path, cb) =>
fs.stat(path, function(err, stats) {
if (err != null) {
return cb(err)
}
if (!(stats != null ? stats.isDirectory() : undefined)) {
return cb(DockerRunner.ERR_NOT_DIRECTORY)
}
return cb()
})
const jobs = []
for (const vol in volumes) {
;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
}
return async.series(jobs, callback)
},
_startContainer(options, volumes, attachStreamHandler, callback) {
if (callback == null) {
callback = function(error, output) {}
}
callback = _.once(callback)
const { name } = options
logger.log({ container_name: name }, 'starting container')
const container = dockerode.getContainer(name)
const createAndStartContainer = () =>
dockerode.createContainer(options, function(error, container) {
if (error != null) {
return callback(error)
}
return startExistingContainer()
})
var startExistingContainer = () =>
DockerRunner.attachToContainer(
options.name,
attachStreamHandler,
function(error) {
if (error != null) {
return callback(error)
}
return container.start(function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) !== 304
) {
// already running
return callback(error)
} else {
return callback()
}
})
}
)
return container.inspect(function(error, stats) {
if ((error != null ? error.statusCode : undefined) === 404) {
return createAndStartContainer()
} else if (error != null) {
logger.err(
{ container_name: name, error },
'unable to inspect container to start'
)
return callback(error)
} else {
return startExistingContainer()
}
})
},
attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
const container = dockerode.getContainer(containerId)
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
error,
stream
) {
if (error != null) {
logger.error(
{ err: error, container_id: containerId },
'error attaching to container'
)
return attachStartCallback(error)
} else {
attachStartCallback()
}
logger.log({ container_id: containerId }, 'attached to container')
const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
const createStringOutputStream = function(name) {
return {
data: '',
overflowed: false,
write(data) {
if (this.overflowed) {
return
}
if (this.data.length < MAX_OUTPUT) {
return (this.data += data)
} else {
logger.error(
{
container_id: containerId,
length: this.data.length,
maxLen: MAX_OUTPUT
},
`${name} exceeds max size`
)
this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
return (this.overflowed = true)
}
}
// kill container if too much output
// docker.containers.kill(containerId, () ->)
}
}
const stdout = createStringOutputStream('stdout')
const stderr = createStringOutputStream('stderr')
container.modem.demuxStream(stream, stdout, stderr)
stream.on('error', err =>
logger.error(
{ err, container_id: containerId },
'error reading from container stream'
)
)
return stream.on('end', () =>
attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
)
})
},
waitForContainer(containerId, timeout, _callback) {
if (_callback == null) {
_callback = function(error, exitCode) {}
}
const callback = function(...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
}
const container = dockerode.getContainer(containerId)
let timedOut = false
const timeoutId = setTimeout(function() {
timedOut = true
logger.log(
{ container_id: containerId },
'timeout reached, killing container'
)
return container.kill(function() {})
}, timeout)
logger.log({ container_id: containerId }, 'waiting for docker container')
return container.wait(function(error, res) {
if (error != null) {
clearTimeout(timeoutId)
logger.error(
{ err: error, container_id: containerId },
'error waiting for container'
)
return callback(error)
}
if (timedOut) {
logger.log({ containerId }, 'docker container timed out')
error = DockerRunner.ERR_TIMED_OUT
error.timedout = true
return callback(error)
} else {
clearTimeout(timeoutId)
logger.log(
{ container_id: containerId, exitCode: res.StatusCode },
'docker container returned'
)
return callback(null, res.StatusCode)
}
})
},
destroyContainer(containerName, containerId, shouldForce, callback) {
// We want the containerName for the lock and, ideally, the
// containerId to delete. There is a bug in the docker.io module
// where if you delete by name and there is an error, it throws an
// async exception, but if you delete by id it just does a normal
// error callback. We fall back to deleting by name if no id is
// supplied.
if (callback == null) {
callback = function(error) {}
}
return LockManager.runWithLock(
containerName,
releaseLock =>
DockerRunner._destroyContainer(
containerId || containerName,
shouldForce,
releaseLock
),
callback
)
},
_destroyContainer(containerId, shouldForce, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ container_id: containerId }, 'destroying docker container')
const container = dockerode.getContainer(containerId)
return container.remove({ force: shouldForce === true }, function(error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) === 404
) {
logger.warn(
{ err: error, container_id: containerId },
'container not found, continuing'
)
error = null
}
if (error != null) {
logger.error(
{ err: error, container_id: containerId },
'error destroying container'
)
} else {
logger.log({ container_id: containerId }, 'destroyed container')
}
return callback(error)
})
},
// handle expiry of docker containers
MAX_CONTAINER_AGE:
Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
examineOldContainer(container, callback) {
if (callback == null) {
callback = function(error, name, id, ttl) {}
}
const name =
container.Name ||
(container.Names != null ? container.Names[0] : undefined)
const created = container.Created * 1000 // creation time is returned in seconds
const now = Date.now()
const age = now - created
const maxAge = DockerRunner.MAX_CONTAINER_AGE
const ttl = maxAge - age
logger.log(
{ containerName: name, created, now, age, maxAge, ttl },
'checking whether to destroy container'
)
return callback(null, name, container.Id, ttl)
},
destroyOldContainers(callback) {
if (callback == null) {
callback = function(error) {}
}
return dockerode.listContainers({ all: true }, function(error, containers) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const container of Array.from(containers || [])) {
;(container =>
DockerRunner.examineOldContainer(container, function(
err,
name,
id,
ttl
) {
if (name.slice(0, 9) === '/project-' && ttl <= 0) {
// strip the / prefix
// the LockManager uses the plain container name
name = name.slice(1)
return jobs.push(cb =>
DockerRunner.destroyContainer(name, id, false, () => cb())
)
}
}))(container)
}
// Ignore errors because some containers get stuck but
// will be destroyed next time
return async.series(jobs, callback)
})
},
startContainerMonitor() {
logger.log(
{ maxAge: DockerRunner.MAX_CONTAINER_AGE },
'starting container expiry'
)
// guarantee only one monitor is running
DockerRunner.stopContainerMonitor()
// randomise the start time
const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
containerMonitorTimeout = setTimeout(() => {
containerMonitorInterval = setInterval(
() => DockerRunner.destroyOldContainers(),
(oneHour = 60 * 60 * 1000)
)
}, randomDelay)
},
stopContainerMonitor() {
if (containerMonitorTimeout) {
clearTimeout(containerMonitorTimeout)
containerMonitorTimeout = undefined
}
if (containerMonitorInterval) {
clearInterval(containerMonitorInterval)
containerMonitorInterval = undefined
}
}
}
DockerRunner.startContainerMonitor()
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
function __guardMethod__(obj, methodName, transform) {
if (
typeof obj !== 'undefined' &&
obj !== null &&
typeof obj[methodName] === 'function'
) {
return transform(obj, methodName)
} else {
return undefined
}
}

app/js/DraftModeManager.js Normal file

@@ -0,0 +1,57 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')
module.exports = DraftModeManager = {
injectDraftMode(filename, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.readFile(filename, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
// avoid adding draft mode more than once
if (
(content != null
? content.indexOf('\\documentclass[draft')
: undefined) >= 0
) {
return callback()
}
const modified_content = DraftModeManager._injectDraftOption(content)
logger.log(
{
content: content.slice(0, 1024), // \documentclass is normally very near the top
modified_content: modified_content.slice(0, 1024),
filename
},
'injected draft class'
)
return fs.writeFile(filename, modified_content, callback)
})
},
_injectDraftOption(content) {
return (
content
// With existing options (must be first, otherwise both are applied)
.replace(/\\documentclass\[/g, '\\documentclass[draft,')
// Without existing options
.replace(/\\documentclass\{/g, '\\documentclass[draft]{')
)
}
}
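// Illustrative transformations (not part of the original file):
//   \documentclass[a4paper]{article} -> \documentclass[draft,a4paper]{article}
//   \documentclass{article}          -> \documentclass[draft]{article}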

app/js/Errors.js Normal file

@@ -0,0 +1,36 @@
/* eslint-disable
no-proto,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function(message) {
const error = new Error(message)
error.name = 'NotFoundError'
error.__proto__ = NotFoundError.prototype
return error
}
NotFoundError.prototype.__proto__ = Error.prototype
var FilesOutOfSyncError = function(message) {
const error = new Error(message)
error.name = 'FilesOutOfSyncError'
error.__proto__ = FilesOutOfSyncError.prototype
return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
var AlreadyCompilingError = function(message) {
const error = new Error(message)
error.name = 'AlreadyCompilingError'
error.__proto__ = AlreadyCompilingError.prototype
return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype
module.exports = Errors = {
NotFoundError,
FilesOutOfSyncError,
AlreadyCompilingError
}

app/js/LatexRunner.js Normal file

@@ -0,0 +1,243 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
const fs = require('fs')
const ProcessTable = {} // table of currently running jobs (pids or docker container names)
module.exports = LatexRunner = {
runLatex(project_id, options, callback) {
let command
if (callback == null) {
callback = function(error) {}
}
let {
directory,
mainFile,
compiler,
timeout,
image,
environment,
flags,
compileGroup
} = options
if (!compiler) {
compiler = 'pdflatex'
}
if (!timeout) {
timeout = 60000
} // milliseconds
logger.log(
{
directory,
compiler,
timeout,
mainFile,
environment,
flags,
compileGroup
},
'starting compile'
)
// We want to run latexmk on the tex file which we will automatically
// generate from the Rtex/Rmd/md file.
mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')
if (compiler === 'pdflatex') {
command = LatexRunner._pdflatexCommand(mainFile, flags)
} else if (compiler === 'latex') {
command = LatexRunner._latexCommand(mainFile, flags)
} else if (compiler === 'xelatex') {
command = LatexRunner._xelatexCommand(mainFile, flags)
} else if (compiler === 'lualatex') {
command = LatexRunner._lualatexCommand(mainFile, flags)
} else {
return callback(new Error(`unknown compiler: ${compiler}`))
}
if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
command = ['strace', '-o', 'strace', '-ff'].concat(command)
}
const id = `${project_id}` // record running project under this id
return (ProcessTable[id] = CommandRunner.run(
project_id,
command,
directory,
image,
timeout,
environment,
compileGroup,
function(error, output) {
delete ProcessTable[id]
if (error != null) {
return callback(error)
}
const runs =
__guard__(
__guard__(output != null ? output.stderr : undefined, x1 =>
x1.match(/^Run number \d+ of .*latex/gm)
),
x => x.length
) || 0
const failed =
__guard__(output != null ? output.stdout : undefined, x2 =>
x2.match(/^Latexmk: Errors/m)
) != null
? 1
: 0
// counters from latexmk output
const stats = {}
stats['latexmk-errors'] = failed
stats['latex-runs'] = runs
stats['latex-runs-with-errors'] = failed ? runs : 0
stats[`latex-runs-${runs}`] = 1
stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
// timing information from /usr/bin/time
const timings = {}
const stderr = output != null ? output.stderr : undefined
timings['cpu-percent'] =
__guard__(
stderr != null
? stderr.match(/Percent of CPU this job got: (\d+)/m)
: undefined,
x3 => x3[1]
) || 0
timings['cpu-time'] =
__guard__(
stderr != null
? stderr.match(/User time.*: (\d+.\d+)/m)
: undefined,
x4 => x4[1]
) || 0
timings['sys-time'] =
__guard__(
stderr != null
? stderr.match(/System time.*: (\d+.\d+)/m)
: undefined,
x5 => x5[1]
) || 0
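// Illustrative stderr fragment matched by the regexes above (assuming
// GNU time -v style output from /usr/bin/time):
//   User time (seconds): 12.34
//   System time (seconds): 1.20
//   Percent of CPU this job got: 97%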
// record output files
LatexRunner.writeLogOutput(project_id, directory, output, () => {
return callback(error, output, stats, timings)
})
}
))
},
writeLogOutput(project_id, directory, output, callback) {
if (!output) {
return callback()
}
// internal method for writing non-empty log files
function _writeFile(file, content, cb) {
if (content && content.length > 0) {
fs.writeFile(file, content, err => {
if (err) {
logger.error({ project_id, file }, 'error writing log file') // don't fail on error
}
cb()
})
} else {
cb()
}
}
// write stdout and stderr, ignoring errors
_writeFile(Path.join(directory, 'output.stdout'), output.stdout, () => {
_writeFile(Path.join(directory, 'output.stderr'), output.stderr, () => {
callback()
})
})
},
killLatex(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const id = `${project_id}`
logger.log({ id }, 'killing running compile')
if (ProcessTable[id] == null) {
logger.warn({ id }, 'no such project to kill')
return callback(null)
} else {
return CommandRunner.kill(ProcessTable[id], callback)
}
},
_latexmkBaseCommand(flags) {
let args = [
'latexmk',
'-cd',
'-f',
'-jobname=output',
'-auxdir=$COMPILE_DIR',
'-outdir=$COMPILE_DIR',
'-synctex=1',
'-interaction=batchmode'
]
if (flags) {
args = args.concat(flags)
}
return (
__guard__(
Settings != null ? Settings.clsi : undefined,
x => x.latexmkCommandPrefix
) || []
).concat(args)
},
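// Illustrative result (assuming no latexmkCommandPrefix is configured):
// _pdflatexCommand('main.tex') yields
//   ['latexmk', '-cd', '-f', '-jobname=output', '-auxdir=$COMPILE_DIR',
//    '-outdir=$COMPILE_DIR', '-synctex=1', '-interaction=batchmode',
//    '-pdf', '$COMPILE_DIR/main.tex']
// with $COMPILE_DIR substituted by the command runner.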
_pdflatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdf',
Path.join('$COMPILE_DIR', mainFile)
])
},
_latexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-pdfdvi',
Path.join('$COMPILE_DIR', mainFile)
])
},
_xelatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-xelatex',
Path.join('$COMPILE_DIR', mainFile)
])
},
_lualatexCommand(mainFile, flags) {
return LatexRunner._latexmkBaseCommand(flags).concat([
'-lualatex',
Path.join('$COMPILE_DIR', mainFile)
])
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

app/js/LocalCommandRunner.js Normal file

@@ -0,0 +1,100 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let CommandRunner
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
logger.info('using standard command runner')
module.exports = CommandRunner = {
run(
project_id,
command,
directory,
image,
timeout,
environment,
compileGroup,
callback
) {
let key, value
if (callback == null) {
callback = function(error) {}
}
command = Array.from(command).map(arg =>
arg.toString().replace('$COMPILE_DIR', directory)
)
logger.log({ project_id, command, directory }, 'running command')
logger.warn('timeouts and sandboxing are not enabled with CommandRunner')
// merge environment settings
const env = {}
for (key in process.env) {
value = process.env[key]
env[key] = value
}
for (key in environment) {
value = environment[key]
env[key] = value
}
// run command as detached process so it has its own process group (which can be killed if needed)
const proc = spawn(command[0], command.slice(1), {
cwd: directory,
env,
detached: true
})
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', data => (stdout += data))
proc.on('error', function(err) {
logger.err(
{ err, project_id, command, directory },
'error running command'
)
return callback(err)
})
proc.on('close', function(code, signal) {
let err
logger.info({ code, signal, project_id }, 'command exited')
if (signal === 'SIGTERM') {
// signal from kill method below
err = new Error('terminated')
err.terminated = true
return callback(err)
} else if (code === 1) {
// exit status from chktex
err = new Error('exited')
err.code = code
return callback(err)
} else {
return callback(null, { stdout: stdout })
}
})
return proc.pid
}, // return process id to allow job to be killed if necessary
kill(pid, callback) {
if (callback == null) {
callback = function(error) {}
}
try {
process.kill(-pid) // kill all processes in group
} catch (err) {
return callback(err)
}
return callback()
}
}

app/js/LockManager.js Normal file

@@ -0,0 +1,72 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')
module.exports = LockManager = {
LOCK_TEST_INTERVAL: 1000, // 1s between each test of the lock
MAX_LOCK_WAIT_TIME: 15000, // 15s maximum time to spend trying to get the lock
LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires
runWithLock(path, runner, callback) {
if (callback == null) {
callback = function(error) {}
}
const lockOpts = {
wait: this.MAX_LOCK_WAIT_TIME,
pollPeriod: this.LOCK_TEST_INTERVAL,
stale: this.LOCK_STALE
}
return Lockfile.lock(path, lockOpts, function(error) {
if ((error != null ? error.code : undefined) === 'EEXIST') {
return callback(new Errors.AlreadyCompilingError('compile in progress'))
} else if (error != null) {
return fs.lstat(path, (statLockErr, statLock) =>
fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
logger.err(
{
error,
path,
statLock,
statLockErr,
statDir,
statDirErr,
readdirErr,
readdirDir
},
'unable to get lock'
)
return callback(error)
})
)
)
} else {
return runner((error1, ...args) =>
Lockfile.unlock(path, function(error2) {
error = error1 || error2
if (error != null) {
return callback(error)
}
return callback(null, ...Array.from(args))
})
)
}
})
}
}

app/js/Metrics.js Normal file

@@ -0,0 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')

app/js/OutputCacheManager.js Normal file

@@ -0,0 +1,399 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS104: Avoid inline assignments
* DS204: Change includes calls to have a more natural evaluation order
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputCacheManager
const async = require('async')
const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('lodash')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const OutputFileOptimiser = require('./OutputFileOptimiser')
module.exports = OutputCacheManager = {
CACHE_SUBDIR: '.cache/clsi',
ARCHIVE_SUBDIR: '.archive/clsi',
// build id is HEXDATE-HEXRANDOM from Date.now() and random bytes
// for backwards compatibility, make the randombytes part optional
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
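// e.g. (illustrative) '172c2a5b0a1-4f8d2e1a9b3c7d60': Date.now() in hex,
// then 8 random bytes in hex; the second part is optional for
// backwards compatibility, as the regex above shows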
CACHE_LIMIT: 2, // maximum number of cache directories
CACHE_AGE: 60 * 60 * 1000, // up to one hour old
path(buildId, file) {
// used by static server, given build id return '.cache/clsi/buildId'
if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
} else {
// for invalid build id, return top level
return file
}
},
generateBuildId(callback) {
// generate a secure build id from Date.now() and 8 random bytes in hex
if (callback == null) {
callback = function(error, buildId) {}
}
return crypto.randomBytes(8, function(err, buf) {
if (err != null) {
return callback(err)
}
const random = buf.toString('hex')
const date = Date.now().toString(16)
return callback(err, `${date}-${random}`)
})
},
saveOutputFiles(outputFiles, compileDir, callback) {
if (callback == null) {
callback = function(error) {}
}
return OutputCacheManager.generateBuildId(function(err, buildId) {
if (err != null) {
return callback(err)
}
return OutputCacheManager.saveOutputFilesInBuildDir(
outputFiles,
compileDir,
buildId,
callback
)
})
},
saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
// make a compileDir/CACHE_SUBDIR/build_id directory and
// copy all the output files into it
if (callback == null) {
callback = function(error) {}
}
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
// Put the files into a new cache subdirectory
const cacheDir = Path.join(
compileDir,
OutputCacheManager.CACHE_SUBDIR,
buildId
)
// Is it a per-user compile? check if compile directory is PROJECTID-USERID
const perUser = Path.basename(compileDir).match(
/^[0-9a-f]{24}-[0-9a-f]{24}$/
)
// Archive logs in background
if (
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
) {
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
err
) {
if (err != null) {
return logger.warn({ err }, 'error archiving log files')
}
})
}
// make the new cache directory
return fse.ensureDir(cacheDir, function(err) {
if (err != null) {
logger.error(
{ err, directory: cacheDir },
'error creating cache directory'
)
return callback(err, outputFiles)
} else {
// copy all the output files into the new cache directory
const results = []
return async.mapSeries(
outputFiles,
function(file, cb) {
// don't send dot files as output, express doesn't serve them
if (OutputCacheManager._fileIsHidden(file.path)) {
logger.debug(
{ compileDir, path: file.path },
'ignoring dotfile in output'
)
return cb()
}
// copy other files into cache directory if valid
const newFile = _.clone(file)
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(cacheDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldCopy(src, function(
err,
shouldCopy
) {
if (err != null) {
return cb(err)
}
if (!shouldCopy) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, function(err) {
if (err != null) {
return cb(err)
}
newFile.build = buildId // attach a build id if we cached the file
results.push(newFile)
return cb()
})
})
})
},
function(err) {
if (err != null) {
// pass back the original files if we encountered *any* error
callback(err, outputFiles)
// clean up the directory we just created
return fse.remove(cacheDir, function(err) {
if (err != null) {
return logger.error(
{ err, dir: cacheDir },
'error removing cache dir after failure'
)
}
})
} else {
// pass back the list of new files in the cache
callback(err, results)
// let file expiry run in the background, expire all previous files if per-user
return OutputCacheManager.expireOutputFiles(cacheRoot, {
keep: buildId,
limit: perUser ? 1 : null
})
}
}
)
}
})
},
archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) {
callback = function(error) {}
}
const archiveDir = Path.join(
compileDir,
OutputCacheManager.ARCHIVE_SUBDIR,
buildId
)
logger.log({ dir: archiveDir }, 'archiving log files for project')
return fse.ensureDir(archiveDir, function(err) {
if (err != null) {
return callback(err)
}
return async.mapSeries(
outputFiles,
function(file, cb) {
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(archiveDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldArchive(src, function(
err,
shouldArchive
) {
if (err != null) {
return cb(err)
}
if (!shouldArchive) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, cb)
})
})
},
callback
)
})
},
expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) {
callback = function(error) {}
}
return fs.readdir(cacheRoot, function(err, results) {
if (err != null) {
if (err.code === 'ENOENT') {
return callback(null)
} // cache directory doesn't exist
logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
return callback(err)
}
const dirs = results.sort().reverse()
const currentTime = Date.now()
const isExpired = function(dir, index) {
if ((options != null ? options.keep : undefined) === dir) {
return false
}
// remove any directories over the requested (non-null) limit
if (
(options != null ? options.limit : undefined) != null &&
index > options.limit
) {
return true
}
// remove any directories over the hard limit
if (index > OutputCacheManager.CACHE_LIMIT) {
return true
}
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
}
const toRemove = _.filter(dirs, isExpired)
const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) {
logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
if (err != null) {
logger.error({ err, dir }, 'cache remove error')
}
return cb(err, result)
})
return async.eachSeries(
toRemove,
(dir, cb) => removeDir(dir, cb),
callback
)
})
},
_fileIsHidden(path) {
return (path != null ? path.match(/^\.|\/\./) : undefined) != null
},
_checkFileIsSafe(src, callback) {
// check if we have a valid file to copy into the cache
if (callback == null) {
callback = function(error, isSafe) {}
}
return fs.stat(src, function(err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared before copying to build cache'
)
return callback(err, false)
} else if (err != null) {
// some other problem reading the file
logger.error({ err, file: src }, 'stat error for file in cache')
return callback(err, false)
} else if (!stats.isFile()) {
// other filetype - reject it
logger.warn(
{ src, stat: stats },
'nonfile output - refusing to copy to cache'
)
return callback(null, false)
} else {
// it's a plain file, ok to copy
return callback(null, true)
}
})
},
_copyFile(src, dst, callback) {
// copy output file into the cache
return fse.copy(src, dst, function(err) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
'file has disappeared when copying to build cache'
)
return callback(err, false)
} else if (err != null) {
logger.error({ err, src, dst }, 'copy error for file in cache')
return callback(err)
} else {
if (
Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
) {
// don't run any optimisations on the pdf when they are done
// in the docker container
return callback()
} else {
// call the optimiser for the file too
return OutputFileOptimiser.optimiseFile(src, dst, callback)
}
}
})
},
_checkIfShouldCopy(src, callback) {
if (callback == null) {
callback = function(err, shouldCopy) {}
}
return callback(null, !Path.basename(src).match(/^strace/))
},
_checkIfShouldArchive(src, callback) {
let needle
if (callback == null) {
callback = function(err, shouldCopy) {}
}
if (Path.basename(src).match(/^strace/)) {
return callback(null, true)
}
if (
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
((needle = Path.basename(src)),
['output.log', 'output.blg'].includes(needle))
) {
return callback(null, true)
}
return callback(null, false)
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

app/js/OutputFileFinder.js Normal file

@@ -0,0 +1,115 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-unused-vars,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
module.exports = OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
if (callback == null) {
callback = function(error, outputFiles, allFiles) {}
}
const incomingResources = {}
for (const resource of Array.from(resources)) {
incomingResources[resource.path] = true
}
return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
if (allFiles == null) {
allFiles = []
}
if (error != null) {
logger.err({ err: error }, 'error finding all output files')
return callback(error)
}
const outputFiles = []
for (const file of Array.from(allFiles)) {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
})
}
}
return callback(null, outputFiles, allFiles)
})
},
_getAllFiles(directory, _callback) {
if (_callback == null) {
_callback = function(error, fileList) {}
}
const callback = function(error, fileList) {
_callback(error, fileList)
return (_callback = function() {})
}
// don't include clsi-specific files/directories in the output list
const EXCLUDE_DIRS = [
'-name',
'.cache',
'-o',
'-name',
'.archive',
'-o',
'-name',
'.project-*'
]
const args = [
directory,
'(',
...Array.from(EXCLUDE_DIRS),
')',
'-prune',
'-o',
'-type',
'f',
'-print'
]
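// Equivalent shell invocation (illustrative; the command is spawned
// directly, so no shell quoting is actually involved):
//   find <directory> ( -name .cache -o -name .archive -o -name .project-* ) \
//     -prune -o -type f -print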
logger.log({ args }, 'running find command')
const proc = spawn('find', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
proc.on('error', callback)
return proc.on('close', function(code) {
if (code !== 0) {
logger.warn(
{ directory, code },
"find returned error, directory likely doesn't exist"
)
return callback(null, [])
}
let fileList = stdout.trim().split('\n')
fileList = fileList.map(function(file) {
// Strip leading directory
let path
return (path = Path.relative(directory, file))
})
return callback(null, fileList)
})
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

app/js/OutputFileOptimiser.js Normal file

@@ -0,0 +1,103 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-undef,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('lodash')
module.exports = OutputFileOptimiser = {
optimiseFile(src, dst, callback) {
// check output file (src) and see if we can optimise it, storing
// the result in the build directory (dst)
if (callback == null) {
callback = function(error) {}
}
if (src.match(/\/output\.pdf$/)) {
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
err,
isOptimised
) {
if (err != null || isOptimised) {
return callback(null)
}
return OutputFileOptimiser.optimisePDF(src, dst, callback)
})
} else {
return callback(null)
}
},
checkIfPDFIsOptimised(file, callback) {
const SIZE = 16 * 1024 // check the header of the pdf
const result = Buffer.alloc(SIZE) // fills with zeroes by default
return fs.open(file, 'r', function(err, fd) {
if (err != null) {
return callback(err)
}
return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
fs.close(fd, function(errClose) {
if (errRead != null) {
return callback(errRead)
}
if (errClose != null) {
return callback(errClose)
}
const isOptimised =
buffer.toString('ascii').indexOf('/Linearized 1') >= 0
return callback(null, isOptimised)
})
)
})
},
optimisePDF(src, dst, callback) {
if (callback == null) {
callback = function(error) {}
}
const tmpOutput = dst + '.opt'
const args = ['--linearize', src, tmpOutput]
logger.log({ args }, 'running qpdf command')
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function(err) {
logger.warn({ err, args }, 'qpdf failed')
return callback(null)
}) // ignore the error
return proc.on('close', function(code) {
timer.done()
if (code !== 0) {
logger.warn({ code, args }, 'qpdf returned error')
return callback(null) // ignore the error
}
return fs.rename(tmpOutput, dst, function(err) {
if (err != null) {
logger.warn(
{ tmpOutput, dst },
'failed to rename output of qpdf command'
)
}
return callback(null)
})
})
} // ignore the error
}

app/js/ProjectPersistenceManager.js Normal file

@@ -0,0 +1,185 @@
/* eslint-disable
camelcase,
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')
const diskusage = require('diskusage')
module.exports = ProjectPersistenceManager = {
EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,
refreshExpiryTimeout(callback) {
if (callback == null) {
callback = function(error) {}
}
diskusage.check('/', function(err, stats) {
if (err) {
logger.err({ err: err }, 'error getting disk usage')
return callback(err)
}
const lowDisk = stats.available / stats.total < 0.1
const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9
if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) {
logger.warn(
{ stats: stats },
'disk running low on space, modifying EXPIRY_TIMEOUT'
)
ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry
}
callback()
})
},
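// Worked example (illustrative): with project_cache_length_ms set to 2.5
// days, low disk lowers EXPIRY_TIMEOUT to 0.9 * 2.5 = 2.25 days; the
// guard project_cache_length_ms / 2 < lowerExpiry stops repeated calls
// from shrinking it below half the configured value.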
markProjectAsJustAccessed(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
.update({ lastAccessed: new Date() })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},
clearExpiredProjects(callback) {
if (callback == null) {
callback = function(error) {}
}
return ProjectPersistenceManager._findExpiredProjectIds(function(
error,
project_ids
) {
if (error != null) {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
const jobs = Array.from(project_ids || []).map(project_id =>
(project_id => callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
err
) {
if (err != null) {
logger.error({ err, project_id }, 'error clearing project')
}
return callback()
}))(project_id)
)
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback()
)
})
})
}, // ignore any errors from deleting directories
clearProject(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id, user_id }, 'clearing project for user')
return CompileManager.clearProject(project_id, user_id, function(error) {
if (error != null) {
return callback(error)
}
return ProjectPersistenceManager.clearProjectFromCache(
project_id,
function(error) {
if (error != null) {
return callback(error)
}
return callback()
}
)
})
},
clearProjectFromCache(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from cache')
return UrlCache.clearProject(project_id, function(error) {
if (error != null) {
logger.err({ error, project_id }, 'error clearing project from cache')
return callback(error)
}
return ProjectPersistenceManager._clearProjectFromDatabase(
project_id,
function(error) {
if (error != null) {
logger.err(
{ error, project_id },
'error clearing project from database'
)
}
return callback(error)
}
)
})
},
_clearProjectFromDatabase(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
logger.log({ project_id }, 'clearing project from database')
const job = cb =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
return dbQueue.queue.push(job, callback)
},
_findExpiredProjectIds(callback) {
if (callback == null) {
callback = function(error, project_ids) {}
}
const job = function(cb) {
const keepProjectsFrom = new Date(
Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
)
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
.then(projects =>
cb(
null,
projects.map(project => project.project_id)
)
)
.error(cb)
}
return dbQueue.queue.push(job, callback)
}
}
logger.log(
{ EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
'project assets kept timeout'
)

app/js/RequestParser.js Normal file

@@ -0,0 +1,227 @@
/* eslint-disable
handle-callback-err,
no-control-regex,
no-throw-literal,
no-unused-vars,
no-useless-escape,
standard/no-callback-literal,
valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let RequestParser
const settings = require('settings-sharelatex')
module.exports = RequestParser = {
VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
MAX_TIMEOUT: 600,
parse(body, callback) {
let resource
if (callback == null) {
callback = function(error, data) {}
}
const response = {}
if (body.compile == null) {
return callback('top level object should have a compile attribute')
}
const { compile } = body
if (!compile.options) {
compile.options = {}
}
try {
response.compiler = this._parseAttribute(
'compiler',
compile.options.compiler,
{
validValues: this.VALID_COMPILERS,
default: 'pdflatex',
type: 'string'
}
)
response.timeout = this._parseAttribute(
'timeout',
compile.options.timeout,
{
default: RequestParser.MAX_TIMEOUT,
type: 'number'
}
)
response.imageName = this._parseAttribute(
'imageName',
compile.options.imageName,
{ type: 'string' }
)
response.draft = this._parseAttribute('draft', compile.options.draft, {
default: false,
type: 'boolean'
})
response.check = this._parseAttribute('check', compile.options.check, {
type: 'string'
})
response.flags = this._parseAttribute('flags', compile.options.flags, {
default: [],
type: 'object'
})
if (settings.allowedCompileGroups) {
response.compileGroup = this._parseAttribute(
'compileGroup',
compile.options.compileGroup,
{
validValues: settings.allowedCompileGroups,
default: '',
type: 'string'
}
)
}
// The syncType specifies whether the request contains all
// resources (full) or only those resources to be updated
// in-place (incremental).
response.syncType = this._parseAttribute(
'syncType',
compile.options.syncType,
{
validValues: ['full', 'incremental'],
type: 'string'
}
)
// The syncState is an identifier passed in with the request
// which has the property that it changes when any resource is
// added, deleted, moved or renamed.
//
// on syncType full the syncState identifier is passed in and
// stored
//
// on syncType incremental the syncState identifier must match
// the stored value
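// Illustrative flow (hypothetical values): a first request with
// syncType: 'full' and syncState: 'abc123' stores that identifier;
// a later request with syncType: 'incremental' must send
// syncState: 'abc123' again, or it is rejected with 409 Conflict.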
response.syncState = this._parseAttribute(
'syncState',
compile.options.syncState,
{ type: 'string' }
)
if (response.timeout > RequestParser.MAX_TIMEOUT) {
response.timeout = RequestParser.MAX_TIMEOUT
}
response.timeout = response.timeout * 1000 // milliseconds
response.resources = (() => {
const result = []
for (resource of Array.from(compile.resources || [])) {
result.push(this._parseResource(resource))
}
return result
})()
const rootResourcePath = this._parseAttribute(
'rootResourcePath',
compile.rootResourcePath,
{
default: 'main.tex',
type: 'string'
}
)
const originalRootResourcePath = rootResourcePath
const sanitizedRootResourcePath = RequestParser._sanitizePath(
rootResourcePath
)
response.rootResourcePath = RequestParser._checkPath(
sanitizedRootResourcePath
)
for (resource of Array.from(response.resources)) {
if (resource.path === originalRootResourcePath) {
resource.path = sanitizedRootResourcePath
}
}
} catch (error1) {
const error = error1
return callback(error)
}
return callback(null, response)
},
_parseResource(resource) {
let modified
if (resource.path == null || typeof resource.path !== 'string') {
throw 'all resources should have a path attribute'
}
if (resource.modified != null) {
modified = new Date(resource.modified)
if (isNaN(modified.getTime())) {
throw `resource modified date could not be understood: ${resource.modified}`
}
}
if (resource.url == null && resource.content == null) {
throw 'all resources should have either a url or content attribute'
}
if (resource.content != null && typeof resource.content !== 'string') {
throw 'content attribute should be a string'
}
if (resource.url != null && typeof resource.url !== 'string') {
throw 'url attribute should be a string'
}
return {
path: resource.path,
modified,
url: resource.url,
content: resource.content
}
},
_parseAttribute(name, attribute, options) {
if (attribute != null) {
if (options.validValues != null) {
if (options.validValues.indexOf(attribute) === -1) {
throw `${name} attribute should be one of: ${options.validValues.join(
', '
)}`
}
}
if (options.type != null) {
if (typeof attribute !== options.type) {
throw `${name} attribute should be a ${options.type}`
}
}
} else {
if (options.default != null) {
return options.default
}
}
return attribute
},
_sanitizePath(path) {
// See http://php.net/manual/en/function.escapeshellcmd.php
return path.replace(
/[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
''
)
},
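// e.g. (illustrative): 'chapters/a$b(c).tex' -> 'chapters/abc.tex';
// shell metacharacters are stripped rather than escaped.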
_checkPath(path) {
// check that the request does not use a relative path
for (const dir of Array.from(path.split('/'))) {
if (dir === '..') {
throw 'relative path in root resource'
}
}
return path
}
}

app/js/ResourceStateManager.js Normal file

@@ -0,0 +1,154 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS201: Simplify complex destructure assignments
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')
module.exports = ResourceStateManager = {
// The sync state is an identifier which must match for an
// incremental update to be allowed.
//
// The initial value is passed in and stored on a full
// compile, along with the list of resources.
//
// Subsequent incremental compiles must come with the same value - if
// not they will be rejected with a 409 Conflict response. The
// previous list of resources is returned.
//
// An incremental compile can only update existing files with new
// content. The sync state identifier must change if any docs or
// files are moved, added, deleted or renamed.
SYNC_STATE_FILE: '.project-sync-state',
SYNC_STATE_MAX_SIZE: 128 * 1024,
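// Illustrative .project-sync-state contents (hypothetical paths): one
// resource path per line, followed by the state hash line, e.g.
//   main.tex
//   chapters/ch1.tex
//   stateHash:abc123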
saveProjectState(state, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
if (state == null) {
// remove the file if no state passed in
logger.log({ state, basePath }, 'clearing sync state')
return fs.unlink(stateFile, function(err) {
if (err != null && err.code !== 'ENOENT') {
return callback(err)
} else {
return callback()
}
})
} else {
logger.log({ state, basePath }, 'writing sync state')
const resourceList = Array.from(resources).map(resource => resource.path)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
callback
)
}
},
checkProjectStateMatches(state, basePath, callback) {
if (callback == null) {
callback = function(error, resources) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
const size = this.SYNC_STATE_MAX_SIZE
return SafeReader.readFile(stateFile, size, 'utf8', function(
err,
result,
bytesRead
) {
if (err != null) {
return callback(err)
}
if (bytesRead === size) {
logger.error(
{ file: stateFile, size, bytesRead },
'project state file truncated'
)
}
const array =
__guard__(result != null ? result.toString() : undefined, x =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
const resourceList = array.slice(0, adjustedLength - 1)
const oldState = array[adjustedLength - 1]
const newState = `stateHash:${state}`
logger.log(
{ state, oldState, basePath, stateMatches: newState === oldState },
'checking sync state'
)
if (newState !== oldState) {
return callback(
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
const resources = Array.from(resourceList).map(path => ({ path }))
return callback(null, resources)
}
})
},
checkResourceFiles(resources, allFiles, basePath, callback) {
// check the paths are all relative to current directory
let file
if (callback == null) {
callback = function(error) {}
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
__guard__(file != null ? file.path : undefined, x => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
}
}
}
// check if any of the input files are not present in list of files
const seenFile = {}
for (file of Array.from(allFiles)) {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
.filter(resource => !seenFile[resource.path])
.map(resource => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },
'missing input files for project'
)
return callback(
new Errors.FilesOutOfSyncError(
'resource files missing in incremental update'
)
)
} else {
return callback()
}
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

app/js/ResourceWriter.js Normal file

@@ -0,0 +1,354 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
no-unused-vars,
no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const parallelFileDownloads = settings.parallelFileDownloads || 1
module.exports = ResourceWriter = {
syncResourcesToDisk(request, basePath, callback) {
if (callback == null) {
callback = function(error, resourceList) {}
}
if (request.syncType === 'incremental') {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'incremental sync'
)
return ResourceStateManager.checkProjectStateMatches(
request.syncState,
basePath,
function(error, resourceList) {
if (error != null) {
return callback(error)
}
return ResourceWriter._removeExtraneousFiles(
resourceList,
basePath,
function(error, outputFiles, allFiles) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.checkResourceFiles(
resourceList,
allFiles,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceWriter.saveIncrementalResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, resourceList)
}
)
}
)
}
)
}
)
} else {
logger.log(
{ project_id: request.project_id, user_id: request.user_id },
'full sync'
)
return this.saveAllResourcesToDisk(
request.project_id,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return ResourceStateManager.saveProjectState(
request.syncState,
request.resources,
basePath,
function(error) {
if (error != null) {
return callback(error)
}
return callback(null, request.resources)
}
)
}
)
}
},
saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
},
saveAllResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return this._createDirectory(basePath, error => {
if (error != null) {
return callback(error)
}
return this._removeExtraneousFiles(resources, basePath, error => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
this._writeResourceToDisk(
project_id,
resource,
basePath,
callback
)
})(resource)
)
return async.parallelLimit(jobs, parallelFileDownloads, callback)
})
})
},
_createDirectory(basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.mkdir(basePath, function(err) {
if (err != null) {
if (err.code === 'EEXIST') {
return callback()
} else {
logger.log({ err, dir: basePath }, 'error creating directory')
return callback(err)
}
} else {
return callback()
}
})
},
_removeExtraneousFiles(resources, basePath, _callback) {
if (_callback == null) {
_callback = function(error, outputFiles, allFiles) {}
}
const timer = new Metrics.Timer('unlink-output-files')
const callback = function(error, ...result) {
timer.done()
return _callback(error, ...Array.from(result))
}
return OutputFileFinder.findOutputFiles(resources, basePath, function(
error,
outputFiles,
allFiles
) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const file of Array.from(outputFiles || [])) {
;(function(file) {
const { path } = file
let should_delete = true
if (
path.match(/^output\./) ||
path.match(/\.aux$/) ||
path.match(/^cache\//)
) {
// knitr cache
should_delete = false
}
if (path.match(/^output-.*/)) {
// Tikz cached figures (default case)
should_delete = false
}
if (path.match(/\.(pdf|dpth|md5)$/)) {
// Tikz cached figures (by extension)
should_delete = false
}
if (
path.match(/\.(pygtex|pygstyle)$/) ||
path.match(/(^|\/)_minted-[^\/]+\//)
) {
// minted files/directory
should_delete = false
}
if (
path.match(/\.md\.tex$/) ||
path.match(/(^|\/)_markdown_[^\/]+\//)
) {
// markdown files/directory
should_delete = false
}
if (path.match(/-eps-converted-to\.pdf$/)) {
// Epstopdf generated files
should_delete = false
}
if (
path === 'output.pdf' ||
path === 'output.dvi' ||
path === 'output.log' ||
path === 'output.xdv' ||
path === 'output.stdout' ||
path === 'output.stderr'
) {
should_delete = true
}
if (path === 'output.tex') {
// created by TikzManager if present in output files
should_delete = true
}
if (should_delete) {
return jobs.push(callback =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
)
)
}
})(file)
}
return async.series(jobs, function(error) {
if (error != null) {
return callback(error)
}
return callback(null, outputFiles, allFiles)
})
})
},
_deleteFileIfNotDirectory(path, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.stat(path, function(error, stat) {
if (error != null && error.code === 'ENOENT') {
return callback()
} else if (error != null) {
logger.err(
{ err: error, path },
'error stating file in deleteFileIfNotDirectory'
)
return callback(error)
} else if (stat.isFile()) {
return fs.unlink(path, function(error) {
if (error != null) {
logger.err(
{ err: error, path },
'error removing file in deleteFileIfNotDirectory'
)
return callback(error)
} else {
return callback()
}
})
} else {
return callback()
}
})
},
_writeResourceToDisk(project_id, resource, basePath, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(basePath, resource.path, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
if (error != null) {
return callback(error)
}
// TODO: Don't overwrite file if it hasn't been modified
if (resource.url != null) {
return UrlCache.downloadUrlToFile(
project_id,
resource.url,
path,
resource.modified,
function(err) {
if (err != null) {
logger.err(
{
err,
project_id,
path,
resource_url: resource.url,
modified: resource.modified
},
'error downloading file for resources'
)
Metrics.inc('download-failed')
}
return callback()
}
) // try and continue compiling even if http resource can not be downloaded at this time
} else {
// write the resource content directly; errors propagate via the callback
return fs.writeFile(path, resource.content, callback)
}
})
})
},
checkPath(basePath, resourcePath, callback) {
const path = Path.normalize(Path.join(basePath, resourcePath))
if (path.slice(0, basePath.length + 1) !== basePath + '/') {
return callback(new Error('resource path is outside root directory'))
} else {
return callback(null, path)
}
}
}
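
A quick illustration (not part of the diff) of how the cleanup rules in _removeExtraneousFiles above classify typical output paths. The file names are hypothetical:

// 'output.aux'                  -> kept    (matches /^output\./ and /\.aux$/)
// 'cache/objects.rdb'           -> kept    (knitr cache directory)
// 'output-figure0.md5'          -> kept    (Tikz externalized figure)
// '_minted-main/default.pygtex' -> kept    (minted cache)
// 'texput.log'                  -> deleted (no keep rule matches)
// 'output.pdf'                  -> deleted (explicitly listed; regenerated by each compile)
// 'output.tex'                  -> deleted (re-created by TikzManager when needed)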

60
app/js/SafeReader.js Normal file

@@ -0,0 +1,60 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')
module.exports = SafeReader = {
// safely read up to size bytes from a file and return result as a
// string
readFile(file, size, encoding, callback) {
if (callback == null) {
callback = function(error, result) {}
}
return fs.open(file, 'r', function(err, fd) {
if (err != null && err.code === 'ENOENT') {
return callback()
}
if (err != null) {
return callback(err)
}
// always close the file before returning the result
const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) {
if (err != null) {
return callback(err)
}
if (err1 != null) {
return callback(err1)
}
return callback(null, ...Array.from(result))
})
const buff = Buffer.alloc(size) // fills with zeroes by default
return fs.read(fd, buff, 0, buff.length, 0, function(
err,
bytesRead,
buffer
) {
if (err != null) {
return callbackWithClose(err)
}
const result = buffer.toString(encoding, 0, bytesRead)
return callbackWithClose(null, result, bytesRead)
})
})
}
}
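
A minimal usage sketch (not from the diff) for SafeReader.readFile; the path and size limit are hypothetical:

const SafeReader = require('./SafeReader')
// read at most 1024 bytes; result is undefined when the file is missing,
// because ENOENT is swallowed above
SafeReader.readFile('/tmp/some-state-file', 1024, 'utf8', function(err, result, bytesRead) {
  if (err != null) return console.error(err)
  // bytesRead === 1024 would mean the file may have been truncated
  console.log(bytesRead, result)
})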

94
app/js/StaticServerForbidSymlinks.js Normal file

@@ -0,0 +1,94 @@
/* eslint-disable
camelcase,
no-cond-assign,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')
module.exports = ForbidSymlinks = function(staticFn, root, options) {
const expressStatic = staticFn(root, options)
const basePath = Path.resolve(root)
return function(req, res, next) {
let file, project_id, result
const path = __guard__(url.parse(req.url), x => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]
file = result[2]
} else {
logger.warn({ path }, 'unrecognized file request')
return res.sendStatus(404)
}
// check that the file does not use a relative path
for (const dir of Array.from(file.split('/'))) {
if (dir === '..') {
logger.warn({ path }, 'attempt to use a relative path')
return res.sendStatus(404)
}
}
// check that the requested path is normalized
const requestedFsPath = `${basePath}/${project_id}/${file}`
if (requestedFsPath !== Path.normalize(requestedFsPath)) {
logger.error(
{ path: requestedFsPath },
'requestedFsPath is not normalized'
)
return res.sendStatus(404)
}
// check that the requested path is not a symlink
return fs.realpath(requestedFsPath, function(err, realFsPath) {
if (err != null) {
if (err.code === 'ENOENT') {
return res.sendStatus(404)
} else {
logger.error(
{
err,
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'error checking file access'
)
return res.sendStatus(500)
}
} else if (requestedFsPath !== realFsPath) {
logger.warn(
{
requestedFsPath,
realFsPath,
path: req.params[0],
project_id: req.params.project_id
},
'trying to access a different file (symlink), aborting'
)
return res.sendStatus(404)
} else {
return expressStatic(req, res, next)
}
})
}
}
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}
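
A sketch (not part of the diff) of how this middleware could be mounted, assuming the file is saved as app/js/StaticServerForbidSymlinks.js; the mount point and compiles directory are hypothetical:

const express = require('express')
const ForbidSymlinks = require('./StaticServerForbidSymlinks')
const app = express()
// serve /output/<project_id>/<file> from the compiles directory, refusing
// anything that resolves to a symlink or escapes the base path
app.use('/output', ForbidSymlinks(express.static, '/app/compiles'))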

94
app/js/TikzManager.js Normal file

@@ -0,0 +1,94 @@
/* eslint-disable
handle-callback-err,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')
// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.
module.exports = TikzManager = {
checkMainFile(compileDir, mainFile, resources, callback) {
// if there's already an output.tex file, we don't want to touch it
if (callback == null) {
callback = function(error, needsMainFile) {}
}
for (const resource of Array.from(resources)) {
if (resource.path === 'output.tex') {
logger.log({ compileDir, mainFile }, 'output.tex already in resources')
return callback(null, false)
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
const usesTikzExternalize =
(content != null
? content.indexOf('\\tikzexternalize')
: undefined) >= 0
const usesPsTool =
(content != null ? content.indexOf('{pstool}') : undefined) >= 0
logger.log(
{ compileDir, mainFile, usesTikzExternalize, usesPsTool },
'checked for packages needing main file as output.tex'
)
const needsMainFile = usesTikzExternalize || usesPsTool
return callback(null, needsMainFile)
})
})
},
injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) {
callback = function(error) {}
}
return ResourceWriter.checkPath(compileDir, mainFile, function(
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.readFile(path, 'utf8', function(error, content) {
if (error != null) {
return callback(error)
}
logger.log(
{ compileDir, mainFile },
'copied file to output.tex as project uses packages which require it'
)
// use wx flag to ensure that output file does not already exist
return fs.writeFile(
Path.join(compileDir, 'output.tex'),
content,
{ flag: 'wx' },
callback
)
})
})
}
}
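
A sketch (not from the diff) of the intended calling sequence, wrapped in a hypothetical helper:

const TikzManager = require('./TikzManager')
// copy mainFile to output.tex only when the project uses tikz
// externalization or pstool and does not ship its own output.tex
function maybeInjectOutputFile(compileDir, mainFile, resources, callback) {
  TikzManager.checkMainFile(compileDir, mainFile, resources, function(err, needsMainFile) {
    if (err != null) return callback(err)
    if (!needsMainFile) return callback()
    TikzManager.injectOutputFile(compileDir, mainFile, callback)
  })
}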

278
app/js/UrlCache.js Normal file

@@ -0,0 +1,278 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')
module.exports = UrlCache = {
downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._ensureUrlIsInCache(
project_id,
url,
lastModified,
(error, pathToCachedUrl) => {
if (error != null) {
return callback(error)
}
return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
if (error != null) {
return UrlCache._clearUrlDetails(project_id, url, () =>
callback(error)
)
} else {
return callback(error)
}
})
}
)
},
clearProject(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
logger.log(
{ project_id, url_count: urls.length },
'clearing project URLs'
)
if (error != null) {
return callback(error)
}
const jobs = Array.from(urls || []).map(url =>
(url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
if (error != null) {
logger.error(
{ err: error, project_id, url },
'error clearing project URL'
)
}
return callback()
}))(url)
)
return async.series(jobs, callback)
})
},
_ensureUrlIsInCache(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, pathOnDisk) {}
}
if (lastModified != null) {
// MySQL only stores dates to an accuracy of one second, but the incoming
// lastModified may include milliseconds, so round down to the nearest second
lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
}
return UrlCache._doesUrlNeedDownloading(
project_id,
url,
lastModified,
(error, needsDownloading) => {
if (error != null) {
return callback(error)
}
if (needsDownloading) {
logger.log({ url, lastModified }, 'downloading URL')
return UrlFetcher.pipeUrlToFileWithRetry(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
error => {
if (error != null) {
return callback(error)
}
return UrlCache._updateOrCreateUrlDetails(
project_id,
url,
lastModified,
error => {
if (error != null) {
return callback(error)
}
return callback(
null,
UrlCache._cacheFilePathForUrl(project_id, url)
)
}
)
}
)
} else {
logger.log({ url, lastModified }, 'URL is up to date in cache')
return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
}
}
)
},
_doesUrlNeedDownloading(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, needsDownloading) {}
}
if (lastModified == null) {
return callback(null, true)
}
return UrlCache._findUrlDetails(project_id, url, function(
error,
urlDetails
) {
if (error != null) {
return callback(error)
}
if (
urlDetails == null ||
urlDetails.lastModified == null ||
urlDetails.lastModified.getTime() < lastModified.getTime()
) {
return callback(null, true)
} else {
return callback(null, false)
}
})
},
_cacheFileNameForUrl(project_id, url) {
return (
project_id +
':' +
crypto
.createHash('md5')
.update(url)
.digest('hex')
)
},
_cacheFilePathForUrl(project_id, url) {
return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
project_id,
url
)}`
},
_copyFile(from, to, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (error != null) {
logger.error({ err: error, from, to }, 'error copying file from cache')
}
_callback(error)
return (_callback = function() {})
}
const writeStream = fs.createWriteStream(to)
const readStream = fs.createReadStream(from)
writeStream.on('error', callbackOnce)
readStream.on('error', callbackOnce)
writeStream.on('close', callbackOnce)
return writeStream.on('open', () => readStream.pipe(writeStream))
},
_clearUrlFromCache(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return UrlCache._clearUrlDetails(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
if (error != null) {
return callback(error)
}
return callback(null)
})
})
},
_deleteUrlCacheFromDisk(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
error
) {
if (error != null && error.code !== 'ENOENT') {
// no error if the file isn't present
return callback(error)
} else {
return callback()
}
})
},
_findUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error, urlDetails) {}
}
const job = cb =>
db.UrlCache.findOne({ where: { url, project_id } })
.then(urlDetails => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},
_updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
.update({ lastModified })
.then(() => cb())
.error(cb)
)
.error(cb)
return dbQueue.queue.push(job, callback)
},
_clearUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
}
const job = cb =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
return dbQueue.queue.push(job, callback)
},
_findAllUrlsInProject(project_id, callback) {
if (callback == null) {
callback = function(error, urls) {}
}
const job = cb =>
db.UrlCache.findAll({ where: { project_id } })
.then(urlEntries =>
cb(
null,
urlEntries.map(entry => entry.url)
)
)
.error(cb)
return dbQueue.queue.push(job, callback)
}
}
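
For orientation (not part of the diff), how the cache key scheme above plays out; the IDs and URL are hypothetical:

// _cacheFileNameForUrl('5eaa...', 'http://filestore/project/5eaa.../file/42')
//   -> '5eaa...:' + the md5 hex digest of the URL
// so each cached download lives at `${Settings.path.clsiCacheDir}/<that name>`,
// while the UrlCache table records lastModified per (url, project_id) pair
// and _doesUrlNeedDownloading compares it against the incoming timestamp.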

128
app/js/UrlFetcher.js Normal file

@@ -0,0 +1,128 @@
/* eslint-disable
handle-callback-err,
no-return-assign,
no-unused-vars,
node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const async = require('async')
const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
pipeUrlToFileWithRetry(url, filePath, callback) {
const doDownload = function(cb) {
UrlFetcher.pipeUrlToFile(url, filePath, cb)
}
async.retry(3, doDownload, callback)
},
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) {
_callback = function(error) {}
}
const callbackOnce = function(error) {
if (timeoutHandler != null) {
clearTimeout(timeoutHandler)
}
_callback(error)
return (_callback = function() {})
}
if (settings.filestoreDomainOveride != null) {
const p = URL.parse(url).path
url = `${settings.filestoreDomainOveride}${p}`
}
var timeoutHandler = setTimeout(
function() {
timeoutHandler = null
logger.error({ url, filePath }, 'Timed out downloading file to cache')
return callbackOnce(
new Error(`Timed out downloading file to cache ${url}`)
)
},
// FIXME: maybe need to close fileStream here
3 * oneMinute
)
logger.log({ url, filePath }, 'started downloading url to cache')
const urlStream = request.get({ url, timeout: oneMinute })
urlStream.pause() // stop data flowing until we are ready
// attach handlers before setting up pipes
urlStream.on('error', function(error) {
logger.error({ err: error, url, filePath }, 'error downloading url')
return callbackOnce(
error || new Error(`Something went wrong downloading the URL ${url}`)
)
})
urlStream.on('end', () =>
logger.log({ url, filePath }, 'finished downloading file into cache')
)
return urlStream.on('response', function(res) {
if (res.statusCode >= 200 && res.statusCode < 300) {
const fileStream = fs.createWriteStream(filePath)
// attach handlers before setting up pipes
fileStream.on('error', function(error) {
logger.error(
{ err: error, url, filePath },
'error writing file into cache'
)
return fs.unlink(filePath, function(err) {
if (err != null) {
logger.err({ err, filePath }, 'error deleting file from cache')
}
return callbackOnce(error)
})
})
fileStream.on('finish', function() {
logger.log({ url, filePath }, 'finished writing file into cache')
return callbackOnce()
})
fileStream.on('pipe', () =>
logger.log({ url, filePath }, 'piping into filestream')
)
urlStream.pipe(fileStream)
return urlStream.resume() // now we are ready to handle the data
} else {
logger.error(
{ statusCode: res.statusCode, url, filePath },
'unexpected status code downloading url to cache'
)
// https://nodejs.org/api/http.html#http_class_http_clientrequest
// If you add a 'response' event handler, then you must consume
// the data from the response object, either by calling
// response.read() whenever there is a 'readable' event, or by
// adding a 'data' handler, or by calling the .resume()
// method. Until the data is consumed, the 'end' event will not
// fire. Also, until the data is read it will consume memory
// that can eventually lead to a 'process out of memory' error.
urlStream.resume() // discard the data
return callbackOnce(
new Error(
`URL returned non-success status code: ${res.statusCode} ${url}`
)
)
}
})
}
}
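
A minimal usage sketch (not from the diff); the URL and target path are hypothetical:

const UrlFetcher = require('./UrlFetcher')
// makes up to 3 attempts; each attempt times out after 3 minutes and any
// non-2xx response is converted into an error
UrlFetcher.pipeUrlToFileWithRetry(
  'http://filestore/project/1/file/2',
  '/app/cache/1:abcdef',
  function(err) {
    if (err != null) console.error('download failed after retries', err)
  }
)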

67
app/js/db.js Normal file

@@ -0,0 +1,67 @@
/* eslint-disable
no-console,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('lodash')
const logger = require('logger-sharelatex')
const options = _.extend({ logging: false }, Settings.mysql.clsi)
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')
const sequelize = new Sequelize(
Settings.mysql.clsi.database,
Settings.mysql.clsi.username,
Settings.mysql.clsi.password,
options
)
if (Settings.mysql.clsi.dialect === 'sqlite') {
logger.log('running PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA journal_mode=WAL;')
sequelize.query('PRAGMA synchronous=OFF;')
sequelize.query('PRAGMA read_uncommitted = true;')
}
module.exports = {
UrlCache: sequelize.define(
'UrlCache',
{
url: Sequelize.STRING,
project_id: Sequelize.STRING,
lastModified: Sequelize.DATE
},
{
indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
}
),
Project: sequelize.define(
'Project',
{
project_id: { type: Sequelize.STRING, primaryKey: true },
lastAccessed: Sequelize.DATE
},
{
indexes: [{ fields: ['lastAccessed'] }]
}
),
op: Sequelize.Op,
sync() {
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
.catch(err => console.log(err, 'error syncing'))
}
}

4
bin/acceptance_test Normal file

@@ -0,0 +1,4 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"

BIN
bin/synctex Executable file

Binary file not shown.

11
buildscript.txt Normal file

@@ -0,0 +1,11 @@
clsi
--acceptance-creds=None
--data-dirs=cache,compiles,db
--dependencies=
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=TEXLIVE_IMAGE
--language=es
--node-version=10.21.0
--public-repo=True
--script-version=2.1.0

44
config/settings.defaults.coffee

@@ -1,44 +0,0 @@
Path = require "path"
module.exports =
# Options are passed to Sequelize.
# See http://sequelizejs.com/documentation#usage-options for details
mysql:
clsi:
database: "clsi"
username: "clsi"
password: null
dialect: "sqlite"
storage: Path.resolve(__dirname + "/../db.sqlite")
path:
compilesDir: Path.resolve(__dirname + "/../compiles")
clsiCacheDir: Path.resolve(__dirname + "/../cache")
synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)
internal:
clsi:
port: 3013
host: process.env["LISTEN_ADDRESS"] or "localhost"
apis:
clsi:
url: "http://localhost:3013"
smokeTest: false
project_cache_length_ms: 1000 * 60 * 60 * 24
parallelFileDownloads:1
if process.env["COMMAND_RUNNER"]
module.exports.clsi =
commandRunner: process.env["COMMAND_RUNNER"]
docker:
image: process.env["TEXLIVE_IMAGE"] or "quay.io/sharelatex/texlive-full:2017.1"
env:
HOME: "/tmp"
socketPath: "/var/run/docker.sock"
user: process.env["TEXLIVE_IMAGE_USER"] or "tex"
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000
checkProjectsIntervalMs: 10 * 60 * 1000
module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]

137
config/settings.defaults.js Normal file

@@ -0,0 +1,137 @@
const Path = require('path')
module.exports = {
// Options are passed to Sequelize.
// See http://sequelizejs.com/documentation#usage-options for details
mysql: {
clsi: {
database: 'clsi',
username: 'clsi',
dialect: 'sqlite',
storage:
process.env.SQLITE_PATH || Path.resolve(__dirname, '../db/db.sqlite'),
pool: {
max: 1,
min: 1
},
retry: {
max: 10
}
}
},
compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
processLifespanLimitMs:
parseInt(process.env.PROCESS_LIFE_SPAN_LIMIT_MS) || 60 * 60 * 24 * 1000 * 2,
path: {
compilesDir: Path.resolve(__dirname, '../compiles'),
clsiCacheDir: Path.resolve(__dirname, '../cache'),
synctexBaseDir(projectId) {
return Path.join(this.compilesDir, projectId)
}
},
internal: {
clsi: {
port: 3013,
host: process.env.LISTEN_ADDRESS || 'localhost'
},
load_balancer_agent: {
report_load: true,
load_port: 3048,
local_port: 3049
}
},
apis: {
clsi: {
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
}
},
smokeTest: process.env.SMOKE_TEST || false,
project_cache_length_ms: 1000 * 60 * 60 * 24,
parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1,
parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
texliveOpenoutAny: process.env.TEXLIVE_OPENOUT_ANY,
sentry: {
dsn: process.env.SENTRY_DSN
}
}
if (process.env.ALLOWED_COMPILE_GROUPS) {
try {
module.exports.allowedCompileGroups = process.env.ALLOWED_COMPILE_GROUPS.split(
' '
)
} catch (error) {
console.error(error, 'could not apply allowed compile group setting')
process.exit(1)
}
}
if (process.env.DOCKER_RUNNER) {
let seccompProfilePath
module.exports.clsi = {
dockerRunner: process.env.DOCKER_RUNNER === 'true',
docker: {
runtime: process.env.DOCKER_RUNTIME,
image:
process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
env: {
HOME: '/tmp'
},
socketPath: '/var/run/docker.sock',
user: process.env.TEXLIVE_IMAGE_USER || 'tex'
},
optimiseInDocker: true,
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
checkProjectsIntervalMs: 10 * 60 * 1000
}
try {
// Override individual docker settings using path-based keys, e.g.:
// compileGroupDockerConfigs = {
// priority: { 'HostConfig.CpuShares': 100 }
// beta: { 'dotted.path.here', 'value'}
// }
const compileGroupConfig = JSON.parse(
process.env.COMPILE_GROUP_DOCKER_CONFIGS || '{}'
)
// Automatically clean up wordcount and synctex containers
const defaultCompileGroupConfig = {
wordcount: { 'HostConfig.AutoRemove': true },
synctex: { 'HostConfig.AutoRemove': true }
}
module.exports.clsi.docker.compileGroupConfig = Object.assign(
defaultCompileGroupConfig,
compileGroupConfig
)
} catch (error) {
console.error(error, 'could not apply compile group docker configs')
process.exit(1)
}
try {
seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
module.exports.clsi.docker.seccomp_profile = JSON.stringify(
JSON.parse(require('fs').readFileSync(seccompProfilePath))
)
} catch (error) {
console.error(
error,
`could not load seccomp profile from ${seccompProfilePath}`
)
process.exit(1)
}
module.exports.path.synctexBaseDir = () => '/compile'
module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR
module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH
}
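
A hypothetical example (not in this diff) of the environment variables the block above parses:

// ALLOWED_COMPILE_GROUPS="priority standard"
// COMPILE_GROUP_DOCKER_CONFIGS='{"priority": {"HostConfig.CpuShares": 1024}}'
//
// with DOCKER_RUNNER=true these would produce:
//   module.exports.allowedCompileGroups
//     -> ['priority', 'standard']
//   module.exports.clsi.docker.compileGroupConfig
//     -> { wordcount: { 'HostConfig.AutoRemove': true },
//          synctex: { 'HostConfig.AutoRemove': true },
//          priority: { 'HostConfig.CpuShares': 1024 } }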

2
db/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
*
!.gitignore

5
debug Executable file

@@ -0,0 +1,5 @@
#!/bin/bash
echo "hello world"
sleep 3
echo "awake"
/opt/synctex pdf /compile/output.pdf 1 100 200

32
docker-compose-config.yml Normal file

@@ -0,0 +1,32 @@
version: "2.3"
services:
dev:
environment:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./compiles:/app/compiles
- ./cache:/app/cache
- ./bin/synctex:/app/bin/synctex
ci:
environment:
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
TEXLIVE_IMAGE_USER: "tex"
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
DOCKER_RUNNER: "true"
COMPILES_HOST_DIR: $PWD/compiles
SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
SQLITE_PATH: /app/compiles/db.sqlite
volumes:
- /var/run/docker.sock:/var/run/docker.sock:rw
- ./compiles:/app/compiles
- ./cache:/app/cache
- ./bin/synctex:/app/bin/synctex

38
docker-compose.ci.yml Normal file

@@ -0,0 +1,38 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
version: "2.3"
services:
test_unit:
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
command: npm run test:unit:_run
environment:
NODE_ENV: test
test_acceptance:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
extends:
file: docker-compose-config.yml
service: ci
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
TEXLIVE_IMAGE:
command: npm run test:acceptance:_run
tar:
build: .
image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
volumes:
- ./:/tmp/build/
command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
user: root

39
docker-compose.yml Normal file

@@ -0,0 +1,39 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
version: "2.3"
services:
test_unit:
build:
context: .
target: base
volumes:
- .:/app
working_dir: /app
environment:
MOCHA_GREP: ${MOCHA_GREP}
NODE_ENV: test
command: npm run test:unit
test_acceptance:
build:
context: .
target: base
volumes:
- .:/app
working_dir: /app
extends:
file: docker-compose-config.yml
service: dev
environment:
ELASTIC_SEARCH_DSN: es:9200
REDIS_HOST: redis
MONGO_HOST: mongo
POSTGRES_HOST: postgres
MOCHA_GREP: ${MOCHA_GREP}
LOG_LEVEL: ERROR
NODE_ENV: test
command: npm run test:acceptance

18
entrypoint.sh Executable file

@@ -0,0 +1,18 @@
#!/bin/sh
docker --version >&2
# add the node user to the docker group on the host
DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
usermod -aG dockeronhost node
# compatibility: initial volume setup
chown node:node /app/cache
chown node:node /app/compiles
chown node:node /app/db
# make synctex available for remount in compiles
cp /app/bin/synctex /app/bin/synctex-mount/synctex
exec runuser -u node -- "$@"

4
install_deps.sh Executable file

@@ -0,0 +1,4 @@
#!/bin/sh
wget -qO- https://get.docker.com/ | sh
apt-get install poppler-utils vim ghostscript --yes
npm rebuild

41
kube.yaml Normal file

@@ -0,0 +1,41 @@
apiVersion: v1
kind: Service
metadata:
name: clsi
namespace: default
spec:
type: LoadBalancer
ports:
- port: 80
protocol: TCP
targetPort: 80
selector:
run: clsi
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
name: clsi
namespace: default
spec:
replicas: 2
template:
metadata:
labels:
run: clsi
spec:
containers:
- name: clsi
image: gcr.io/henry-terraform-admin/clsi
imagePullPolicy: Always
readinessProbe:
httpGet:
path: /status
port: 80
periodSeconds: 5
initialDelaySeconds: 0
failureThreshold: 3
successThreshold: 1

18
nodemon.json Normal file

@@ -0,0 +1,18 @@
{
"ignore": [
".git",
"node_modules/"
],
"verbose": true,
"legacyWatch": true,
"execMap": {
"js": "npm run start"
},
"watch": [
"app/js/",
"app.js",
"config/"
],
"ext": "js"
}

7119
package-lock.json generated Normal file

File diff suppressed because it is too large

package.json

@@ -7,46 +7,62 @@
"url": "https://github.com/sharelatex/clsi-sharelatex.git" "url": "https://github.com/sharelatex/clsi-sharelatex.git"
}, },
"scripts": { "scripts": {
"compile:app": "coffee -o app/js -c app/coffee && coffee -c app.coffee", "start": "node $NODE_APP_OPTIONS app.js",
"start": "npm run compile:app && node app.js" "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
"test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
"nodemon": "nodemon --config nodemon.json",
"lint": "node_modules/.bin/eslint .",
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
}, },
"author": "James Allen <james@sharelatex.com>", "author": "James Allen <james@sharelatex.com>",
"dependencies": { "dependencies": {
"async": "0.2.9", "async": "3.2.0",
"body-parser": "^1.2.0", "body-parser": "^1.19.0",
"express": "^4.2.0", "diskusage": "^1.1.3",
"fs-extra": "^0.16.3", "dockerode": "^3.1.0",
"grunt-mkdir": "^1.0.0", "express": "^4.17.1",
"heapdump": "^0.3.5", "fs-extra": "^8.1.0",
"lockfile": "^1.0.3", "heapdump": "^0.3.15",
"logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.5.4", "lockfile": "^1.0.4",
"lynx": "0.0.11", "lodash": "^4.17.15",
"metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.5.0", "logger-sharelatex": "^2.1.0",
"mkdirp": "0.3.5", "lynx": "0.2.0",
"mysql": "2.6.2", "metrics-sharelatex": "^2.6.0",
"request": "^2.21.0", "mysql": "^2.18.1",
"sequelize": "^2.1.3", "request": "^2.88.2",
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0", "sequelize": "^5.21.5",
"smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v0.2.0", "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
"sqlite3": "~3.1.8", "sqlite3": "^4.1.1",
"underscore": "^1.8.2", "v8-profiler-node8": "^6.1.1",
"v8-profiler": "^5.2.4", "wrench": "~1.5.9"
"wrench": "~1.5.4"
}, },
"devDependencies": { "devDependencies": {
"mocha": "1.10.0", "babel-eslint": "^10.1.0",
"coffee-script": "1.6.0", "bunyan": "^1.8.12",
"chai": "~1.8.1", "chai": "~4.2.0",
"sinon": "~1.7.3", "eslint": "^6.8.0",
"grunt": "~0.4.2", "eslint-config-prettier": "^6.10.0",
"grunt-contrib-coffee": "~0.7.0", "eslint-config-standard": "^14.1.0",
"grunt-contrib-clean": "~0.5.0", "eslint-config-standard-jsx": "^8.1.0",
"grunt-shell": "~0.6.1", "eslint-config-standard-react": "^9.2.0",
"grunt-mocha-test": "~0.8.1", "eslint-plugin-chai-expect": "^2.1.0",
"sandboxed-module": "~0.3.0", "eslint-plugin-chai-friendly": "^0.5.0",
"timekeeper": "0.0.4", "eslint-plugin-import": "^2.20.1",
"grunt-execute": "^0.1.5", "eslint-plugin-jsx-a11y": "^6.2.3",
"bunyan": "^0.22.1", "eslint-plugin-mocha": "^6.3.0",
"grunt-bunyan": "^0.5.0" "eslint-plugin-node": "^11.0.0",
"eslint-plugin-prettier": "^3.1.2",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.19.0",
"eslint-plugin-standard": "^4.0.1",
"mocha": "^7.1.0",
"prettier": "^1.19.1",
"prettier-eslint-cli": "^5.0.0",
"sandboxed-module": "^2.0.3",
"sinon": "~9.0.1",
"timekeeper": "2.2.0"
} }
} }

3
patch-texlive-dockerfile Normal file

@@ -0,0 +1,3 @@
FROM quay.io/sharelatex/texlive-full:2017.1
# RUN usermod -u 1001 tex

836
seccomp/clsi-profile.json Normal file

@@ -0,0 +1,836 @@
{
"defaultAction": "SCMP_ACT_ERRNO",
"architectures": [
"SCMP_ARCH_X86_64",
"SCMP_ARCH_X86",
"SCMP_ARCH_X32"
],
"syscalls": [
{
"name": "access",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "arch_prctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "brk",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "chdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "chmod",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_getres",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_gettime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clock_nanosleep",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "clone",
"action": "SCMP_ACT_ALLOW",
"args": [
{
"index": 0,
"value": 2080505856,
"valueTwo": 0,
"op": "SCMP_CMP_MASKED_EQ"
}
]
},
{
"name": "close",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "copy_file_range",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "creat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "dup3",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "execve",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "execveat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "exit",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "exit_group",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "faccessat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fadvise64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fadvise64_64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fallocate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchmod",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchmodat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fcntl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fcntl64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fdatasync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fork",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fstatfs64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fsync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ftruncate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ftruncate64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "futex",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "futimesat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getcpu",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getcwd",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getdents",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getdents64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getegid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getegid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "geteuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "geteuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgroups",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getgroups32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpgrp",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getppid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getpriority",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresgid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getresuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getrlimit",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "get_robust_list",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getrusage",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getsid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "gettid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "getuid32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "ioctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "kill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "_llseek",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lseek",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lstat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "lstat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "madvise",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mkdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mkdirat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mmap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mmap2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mprotect",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "mremap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "munmap",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "newfstatat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "open",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "openat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pause",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pipe",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pipe2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "prctl",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pread64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "preadv",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "prlimit64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pwrite64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pwritev",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "read",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readlink",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readlinkat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "readv",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rename",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "renameat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "renameat2",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "restart_syscall",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rmdir",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigaction",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigpending",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigprocmask",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigqueueinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigreturn",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigsuspend",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_sigtimedwait",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "rt_tgsigqueueinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getaffinity",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getparam",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_get_priority_max",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_get_priority_min",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_getscheduler",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_rr_get_interval",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sched_yield",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sendfile",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sendfile64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgroups",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgroups32",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "set_robust_list",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "set_tid_address",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sigaltstack",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "stat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "stat64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "statfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "statfs64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sync",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sync_file_range",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "syncfs",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "sysinfo",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "tgkill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_create",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_delete",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_getoverrun",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_gettime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "timer_settime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "times",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "tkill",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "truncate",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "truncate64",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "umask",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "uname",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "unlink",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "unlinkat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utime",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utimensat",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "utimes",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "vfork",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "vhangup",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "wait4",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "waitid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "write",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "writev",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "pread",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setgid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "setuid",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "capget",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "capset",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "fchown",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "gettimeofday",
"action": "SCMP_ACT_ALLOW",
"args": []
},
{
"name": "epoll_pwait",
"action": "SCMP_ACT_ALLOW",
"args": []
}
]
}

34
synctex.profile Normal file

@@ -0,0 +1,34 @@
include /etc/firejail/disable-common.inc
include /etc/firejail/disable-devel.inc
# include /etc/firejail/disable-mgmt.inc ## removed in 0.9.40
# include /etc/firejail/disable-secret.inc ## removed in 0.9.40
read-only /bin
blacklist /boot
blacklist /dev
read-only /etc
blacklist /home # blacklisted for synctex
read-only /lib
read-only /lib64
blacklist /media
blacklist /mnt
blacklist /opt
blacklist /root
read-only /run
blacklist /sbin
blacklist /selinux
blacklist /src
blacklist /sys
read-only /usr
caps.drop all
noroot
nogroups
net none
private-tmp
private-dev
shell none
seccomp
nonewprivs


@@ -1,46 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
describe "Broken LaTeX file", ->
before ->
@broken_request =
resources: [
path: "main.tex"
content: '''
\\documentclass{articl % :(
\\begin{documen % :(
Broken
\\end{documen % :(
'''
]
@correct_request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
describe "on first run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @broken_request, (@error, @res, @body) => done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"
describe "on second run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @correct_request, () =>
Client.compile @project_id, @broken_request, (@error, @res, @body) =>
done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"


@@ -1,34 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
describe "Deleting Old Files", ->
before ->
@request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
describe "on first run", ->
before (done) ->
@project_id = Client.randomId()
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return a success status", ->
@body.compile.status.should.equal "success"
describe "after file has been deleted", ->
before (done) ->
@request.resources = []
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
it "should return a failure status", ->
@body.compile.status.should.equal "failure"


@@ -1,110 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
fs = require "fs"
ChildProcess = require "child_process"
fixturePath = (path) -> __dirname + "/../fixtures/" + path
try
fs.mkdirSync(fixturePath("tmp"))
catch e
convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
stdout = ""
convert.stdout.on "data", (chunk) -> console.log "STDOUT", chunk.toString()
convert.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
convert.on "exit", () ->
callback()
compare = (originalPath, generatedPath, callback = (error, same) ->) ->
diff_file = "#{fixturePath(generatedPath)}-diff.png"
proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{diff_file}"
stderr = ""
proc.stderr.on "data", (chunk) -> stderr += chunk
proc.on "exit", () ->
if stderr.trim() == "0 (0)"
fs.unlink diff_file # remove output diff if test matches expected image
callback null, true
else
console.log "compare result", stderr
callback null, false
checkPdfInfo = (pdfPath, callback = (error, output) ->) ->
proc = ChildProcess.exec "pdfinfo #{fixturePath(pdfPath)}"
stdout = ""
proc.stdout.on "data", (chunk) -> stdout += chunk
proc.stderr.on "data", (chunk) -> console.log "STDERR", chunk.toString()
proc.on "exit", () ->
if stdout.match(/Optimized:\s+yes/)
callback null, true
else
console.log "pdfinfo result", stdout
callback null, false
compareMultiplePages = (project_id, callback = (error) ->) ->
compareNext = (page_no, callback) ->
path = "tmp/#{project_id}-source-#{page_no}.png"
fs.stat fixturePath(path), (error, stat) ->
if error?
callback()
else
compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) =>
throw error if error?
same.should.equal true
compareNext page_no + 1, callback
compareNext 0, callback
comparePdf = (project_id, example_dir, callback = (error) ->) ->
convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
throw error if error?
convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
throw error if error?
fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
if error?
compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
throw error if error?
same.should.equal true
callback()
else
compareMultiplePages project_id, (error) ->
throw error if error?
callback()
downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
request.get(url).pipe(writeStream)
writeStream.on "close", () =>
checkPdfInfo "tmp/#{project_id}.pdf", (error, optimised) =>
throw error if error?
optimised.should.equal true
comparePdf project_id, example_dir, callback
Client.runServer(4242, fixturePath("examples"))
describe "Example Documents", ->
before (done) ->
ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> done()
for example_dir in fs.readdirSync fixturePath("examples")
do (example_dir) ->
describe example_dir, ->
before ->
@project_id = Client.randomId() + "_" + example_dir
it "should generate the correct pdf", (done) ->
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body)
pdf = Client.getOutputFile body, "pdf"
downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
it "should generate the correct pdf on the second run as well", (done) ->
Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
if error || body?.compile?.status is "failure"
console.log "DEBUG: error", error, "body", JSON.stringify(body)
pdf = Client.getOutputFile body, "pdf"
downloadAndComparePdf(@project_id, example_dir, pdf.url, done)


@@ -1,39 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
describe "Simple LaTeX file", ->
before (done) ->
@project_id = Client.randomId()
@request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return the PDF", ->
pdf = Client.getOutputFile(@body, "pdf")
pdf.type.should.equal "pdf"
it "should return the log", ->
log = Client.getOutputFile(@body, "log")
log.type.should.equal "log"
it "should provide the pdf for download", (done) ->
pdf = Client.getOutputFile(@body, "pdf")
request.get pdf.url, (error, res, body) ->
res.statusCode.should.equal 200
done()
it "should provide the log for download", (done) ->
log = Client.getOutputFile(@body, "pdf")
request.get log.url, (error, res, body) ->
res.statusCode.should.equal 200
done()


@@ -1,37 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
describe "Syncing", ->
before (done) ->
@request =
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}
'''
]
@project_id = Client.randomId()
Client.compile @project_id, @request, (@error, @res, @body) => done()
describe "from code to pdf", ->
it "should return the correct location", (done) ->
Client.syncFromCode @project_id, "main.tex", 3, 5, (error, pdfPositions) ->
throw error if error?
expect(pdfPositions).to.deep.equal(
pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ]
)
done()
describe "from pdf to code", ->
it "should return the correct location", (done) ->
Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) ->
throw error if error?
expect(codePositions).to.deep.equal(
code: [ { file: 'main.tex', line: 3, column: -1 } ]
)
done()

View File

@@ -1,28 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
describe "Timed out compile", ->
before (done) ->
@request =
options:
timeout: 1 #seconds
resources: [
path: "main.tex"
content: '''
\\documentclass{article}
\\begin{document}
Hello world
\\input{|"sleep 10"}
\\end{document}
'''
]
@project_id = Client.randomId()
Client.compile @project_id, @request, (@error, @res, @body) => done()
it "should return a timeout error", ->
@body.compile.error.should.equal "container timed out"
it "should return a timedout status", ->
@body.compile.status.should.equal "timedout"

View File

@@ -1,220 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
sinon = require "sinon"
host = "localhost"
Server =
run: () ->
express = require "express"
app = express()
staticServer = express.static __dirname + "/../fixtures/"
app.get "/:random_id/*", (req, res, next) =>
@getFile(req.url)
req.url = "/" + req.params[0]
staticServer(req, res, next)
app.listen 31415, host
getFile: () ->
randomId: () ->
Math.random().toString(16).slice(2)
Server.run()
describe "Url Caching", ->
describe "Downloading an image for the first time", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
}]
sinon.spy Server, "getFile"
Client.compile @project_id, @request, (@error, @res, @body) => done()
afterEach ->
Server.getFile.restore()
it "should download the image", ->
Server.getFile
.calledWith("/" + @file)
.should.equal true
describe "When an image is in the cache and the last modified date is unchanged", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
after ->
Server.getFile.restore()
it "should not download the image again", ->
Server.getFile.called.should.equal false
describe "When an image is in the cache and the last modified date is advanced", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
@image_resource.modified = new Date(@last_modified + 3000)
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true
describe "When an image is in the cache and the last modified date is further in the past", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
@image_resource.modified = new Date(@last_modified - 3000)
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should not download the image again", ->
Server.getFile.called.should.equal false
describe "When an image is in the cache and the last modified date is not specified", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (@error, @res, @body) =>
sinon.spy Server, "getFile"
delete @image_resource.modified
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true
describe "After clearing the cache", ->
before (done) ->
@project_id = Client.randomId()
@file = "#{Server.randomId()}/lion.png"
@request =
resources: [{
path: "main.tex"
content: '''
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}
'''
}, @image_resource = {
path: "lion.png"
url: "http://#{host}:31415/#{@file}"
modified: @last_modified = Date.now()
}]
Client.compile @project_id, @request, (error) =>
throw error if error?
Client.clearCache @project_id, (error, res, body) =>
throw error if error?
sinon.spy Server, "getFile"
Client.compile @project_id, @request, (@error, @res, @body) =>
done()
afterEach ->
Server.getFile.restore()
it "should download the image again", ->
Server.getFile.called.should.equal true

View File

@@ -1,36 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect
path = require("path")
fs = require("fs")
describe "Syncing", ->
before (done) ->
@request =
resources: [
path: "main.tex"
content: fs.readFileSync(path.join(__dirname,"../fixtures/naugty_strings.txt"),"utf-8")
]
@project_id = Client.randomId()
Client.compile @project_id, @request, (@error, @res, @body) => done()
describe "wordcount file", ->
it "should return wordcount info", (done) ->
Client.wordcount @project_id, "main.tex", (error, result) ->
throw error if error?
expect(result).to.deep.equal(
texcount: {
encode: "utf8"
textWords: 2281
headWords: 2
outside: 0
headers: 2
elements: 0
mathInline: 6
mathDisplay: 0
errors: 0
messages: ""
}
)
done()

View File

@@ -1,104 +0,0 @@
request = require "request"
fs = require "fs"
Settings = require "settings-sharelatex"
host = "localhost"
module.exports = Client =
host: Settings.apis.clsi.url
randomId: () ->
Math.random().toString(16).slice(2)
compile: (project_id, data, callback = (error, res, body) ->) ->
request.post {
url: "#{@host}/project/#{project_id}/compile"
json:
compile: data
}, callback
clearCache: (project_id, callback = (error, res, body) ->) ->
request.del "#{@host}/project/#{project_id}", callback
getOutputFile: (response, type) ->
for file in response.compile.outputFiles
if file.type == type and file.url.match("output.#{type}")
return file
return null
runServer: (port, directory) ->
express = require("express")
app = express()
app.use express.static(directory)
app.listen(port, host).on "error", (error) ->
console.error "error starting server:", error.message
process.exit(1)
syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/sync/code"
qs: {
file: file
line: line
column: column
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)
syncFromPdf: (project_id, page, h, v, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/sync/pdf"
qs: {
page: page,
h: h, v: v
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)
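# Build a compile request from an example directory: text sources are inlined, binary assets are served over HTTP, and options.json (if present) supplies the compile options.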
compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) ->
resources = []
entities = fs.readdirSync("#{baseDirectory}/#{directory}")
rootResourcePath = "main.tex"
while (entities.length > 0)
entity = entities.pop()
stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}")
if stat.isDirectory()
entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) ->
if subEntity == "main.tex"
rootResourcePath = "#{entity}/#{subEntity}"
return "#{entity}/#{subEntity}"
else if stat.isFile() and entity != "output.pdf"
extension = entity.split(".").pop()
if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1
resources.push
path: entity
content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString()
else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1
resources.push
path: entity
url: "http://#{host}:#{serverPort}/#{directory}/#{entity}"
modified: stat.mtime
fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) =>
req =
resources: resources
rootResourcePath: rootResourcePath
if !error?
body = JSON.parse body
req.options = body
@compile project_id, req, callback
wordcount: (project_id, file, callback = (error, pdfPositions) ->) ->
request.get {
url: "#{@host}/project/#{project_id}/wordcount"
qs: {
file: file
}
}, (error, response, body) ->
return callback(error) if error?
callback null, JSON.parse(body)

View File

@@ -0,0 +1,88 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Broken LaTeX file', function() {
before(function(done) {
this.broken_request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{articl % :(
\\begin{documen % :(
Broken
\\end{documen % :(\
`
}
]
}
this.correct_request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(done)
})
describe('on first run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(
this.project_id,
this.broken_request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
return describe('on second run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(this.project_id, this.correct_request, () => {
return Client.compile(
this.project_id,
this.broken_request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
})

View File

@@ -0,0 +1,73 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Deleting Old Files', function() {
before(function(done) {
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(done)
})
return describe('on first run', function() {
before(function(done) {
this.project_id = Client.randomId()
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
it('should return a success status', function() {
return this.body.compile.status.should.equal('success')
})
return describe('after file has been deleted', function() {
before(function(done) {
this.request.resources = []
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
return it('should return a failure status', function() {
return this.body.compile.status.should.equal('failure')
})
})
})
})

View File

@@ -0,0 +1,285 @@
/* eslint-disable
camelcase,
handle-callback-err,
no-path-concat,
no-return-assign,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS103: Rewrite code to no longer use __guard__
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const fs = require('fs')
const fsExtra = require('fs-extra')
const ChildProcess = require('child_process')
const ClsiApp = require('./helpers/ClsiApp')
const logger = require('logger-sharelatex')
const Path = require('path')
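// Map "tmp/..." paths to a scratch directory outside the fixtures tree so generated PDFs and PNGs do not pollute the checked-in examples.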
const fixturePath = path => {
if (path.slice(0, 3) === 'tmp') {
return '/tmp/clsi_acceptance_tests' + path.slice(3)
}
return Path.normalize(__dirname + '/../fixtures/' + path)
}
const process = require('process')
console.log(
process.pid,
process.ppid,
process.getuid(),
process.getgroups(),
'PID'
)
const MOCHA_LATEX_TIMEOUT = 60 * 1000
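// Rasterise a PDF to PNG with ImageMagick's convert; multi-page PDFs yield one numbered PNG per page.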
const convertToPng = function(pdfPath, pngPath, callback) {
if (callback == null) {
callback = function(error) {}
}
const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}`
console.log('COMMAND')
console.log(command)
const convert = ChildProcess.exec(command)
convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return convert.on('exit', () => callback())
}
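// Diff two PNGs with ImageMagick's compare using mean absolute error; a stderr of '0 (0)' means a perfect match, in which case the diff image is removed.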
const compare = function(originalPath, generatedPath, callback) {
if (callback == null) {
callback = function(error, same) {}
}
const diff_file = `${fixturePath(generatedPath)}-diff.png`
const proc = ChildProcess.exec(
`compare -metric mae ${fixturePath(originalPath)} ${fixturePath(
generatedPath
)} ${diff_file}`
)
let stderr = ''
proc.stderr.on('data', chunk => (stderr += chunk))
return proc.on('exit', () => {
if (stderr.trim() === '0 (0)') {
// remove output diff if test matches expected image
fs.unlink(diff_file, err => {
if (err) {
throw err
}
})
return callback(null, true)
} else {
console.log('compare result', stderr)
return callback(null, false)
}
})
}
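// Run pdfinfo and report whether the PDF is marked 'Optimized: yes' (linearised).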
const checkPdfInfo = function(pdfPath, callback) {
if (callback == null) {
callback = function(error, output) {}
}
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
let stdout = ''
proc.stdout.on('data', chunk => (stdout += chunk))
proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
return proc.on('exit', () => {
if (stdout.match(/Optimized:\s+yes/)) {
return callback(null, true)
} else {
return callback(null, false)
}
})
}
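// Compare numbered page PNGs in sequence, stopping at the first page that has no source image.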
const compareMultiplePages = function(project_id, callback) {
if (callback == null) {
callback = function(error) {}
}
var compareNext = function(page_no, callback) {
const path = `tmp/${project_id}-source-${page_no}.png`
return fs.stat(fixturePath(path), (error, stat) => {
if (error != null) {
return callback()
} else {
return compare(
`tmp/${project_id}-source-${page_no}.png`,
`tmp/${project_id}-generated-${page_no}.png`,
(error, same) => {
if (error != null) {
throw error
}
same.should.equal(true)
return compareNext(page_no + 1, callback)
}
)
}
})
}
return compareNext(0, callback)
}
const comparePdf = function(project_id, example_dir, callback) {
if (callback == null) {
callback = function(error) {}
}
console.log('CONVERT')
console.log(`tmp/${project_id}.pdf`, `tmp/${project_id}-generated.png`)
return convertToPng(
`tmp/${project_id}.pdf`,
`tmp/${project_id}-generated.png`,
error => {
if (error != null) {
throw error
}
return convertToPng(
`examples/${example_dir}/output.pdf`,
`tmp/${project_id}-source.png`,
error => {
if (error != null) {
throw error
}
return fs.stat(
fixturePath(`tmp/${project_id}-source-0.png`),
(error, stat) => {
if (error != null) {
return compare(
`tmp/${project_id}-source.png`,
`tmp/${project_id}-generated.png`,
(error, same) => {
if (error != null) {
throw error
}
same.should.equal(true)
return callback()
}
)
} else {
return compareMultiplePages(project_id, error => {
if (error != null) {
throw error
}
return callback()
})
}
}
)
}
)
}
)
}
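// Stream the compiled PDF to disk, assert it was optimised, then compare it against the reference output.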
const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
if (callback == null) {
callback = function(error) {}
}
const writeStream = fs.createWriteStream(fixturePath(`tmp/${project_id}.pdf`))
request.get(url).pipe(writeStream)
console.log('writing file out', fixturePath(`tmp/${project_id}.pdf`))
return writeStream.on('close', () => {
return checkPdfInfo(`tmp/${project_id}.pdf`, (error, optimised) => {
if (error != null) {
throw error
}
optimised.should.equal(true)
return comparePdf(project_id, example_dir, callback)
})
})
}
Client.runServer(4242, fixturePath('examples'))
describe('Example Documents', function() {
before(function(done) {
ClsiApp.ensureRunning(done)
})
before(function(done) {
fsExtra.remove(fixturePath('tmp'), done)
})
before(function(done) {
fs.mkdir(fixturePath('tmp'), done)
})
after(function(done) {
fsExtra.remove(fixturePath('tmp'), done)
})
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
(example_dir =>
describe(example_dir, function() {
before(function() {
return (this.project_id = Client.randomId() + '_' + example_dir)
})
it('should generate the correct pdf', function(done) {
this.timeout(MOCHA_LATEX_TIMEOUT)
return Client.compileDirectory(
this.project_id,
fixturePath('examples'),
example_dir,
4242,
(error, res, body) => {
if (
error ||
__guard__(
body != null ? body.compile : undefined,
x => x.status
) === 'failure'
) {
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(
this.project_id,
example_dir,
pdf.url,
done
)
}
)
})
return it('should generate the correct pdf on the second run as well', function(done) {
this.timeout(MOCHA_LATEX_TIMEOUT)
return Client.compileDirectory(
this.project_id,
fixturePath('examples'),
example_dir,
4242,
(error, res, body) => {
if (
error ||
__guard__(
body != null ? body.compile : undefined,
x => x.status
) === 'failure'
) {
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
}
const pdf = Client.getOutputFile(body, 'pdf')
return downloadAndComparePdf(
this.project_id,
example_dir,
pdf.url,
done
)
}
)
})
}))(example_dir)
)
})
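// Safe-navigation helper emitted by decaffeinate for the original CoffeeScript body?.compile?.status.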
function __guard__(value, transform) {
return typeof value !== 'undefined' && value !== null
? transform(value)
: undefined
}

View File

@@ -0,0 +1,71 @@
/* eslint-disable
handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Simple LaTeX file', function() {
before(function(done) {
this.project_id = Client.randomId()
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
}
]
}
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
it('should return the PDF', function() {
const pdf = Client.getOutputFile(this.body, 'pdf')
return pdf.type.should.equal('pdf')
})
it('should return the log', function() {
const log = Client.getOutputFile(this.body, 'log')
return log.type.should.equal('log')
})
it('should provide the pdf for download', function(done) {
const pdf = Client.getOutputFile(this.body, 'pdf')
return request.get(pdf.url, (error, res, body) => {
res.statusCode.should.equal(200)
return done()
})
})
return it('should provide the log for download', function(done) {
const log = Client.getOutputFile(this.body, 'log')
return request.get(log.url, (error, res, body) => {
res.statusCode.should.equal(200)
return done()
})
})
})

View File

@@ -0,0 +1,191 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const ClsiApp = require('./helpers/ClsiApp')
const crypto = require('crypto')
describe('Syncing', function() {
before(function(done) {
const content = `\
\\documentclass{article}
\\begin{document}
Hello world
\\end{document}\
`
this.request = {
resources: [
{
path: 'main.tex',
content
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
describe('from code to pdf', function() {
return it('should return the correct location', function(done) {
return Client.syncFromCode(
this.project_id,
'main.tex',
3,
5,
(error, pdfPositions) => {
if (error != null) {
throw error
}
expect(pdfPositions).to.deep.equal({
pdf: [
{ page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 }
]
})
return done()
}
)
})
})
describe('from pdf to code', function() {
return it('should return the correct location', function(done) {
return Client.syncFromPdf(
this.project_id,
1,
100,
200,
(error, codePositions) => {
if (error != null) {
throw error
}
expect(codePositions).to.deep.equal({
code: [{ file: 'main.tex', line: 3, column: -1 }]
})
return done()
}
)
})
})
describe('when the project directory is not available', function() {
before(function() {
this.other_project_id = Client.randomId()
})
describe('from code to pdf', function() {
it('should return a 404 response', function(done) {
return Client.syncFromCode(
this.other_project_id,
'main.tex',
3,
5,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
describe('from pdf to code', function() {
it('should return a 404 response', function(done) {
return Client.syncFromPdf(
this.other_project_id,
1,
100,
200,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
})
describe('when the synctex file is not available', function() {
before(function(done) {
this.broken_project_id = Client.randomId()
const content = 'this is not valid tex' // not a valid tex file
this.request = {
resources: [
{
path: 'main.tex',
content
}
]
}
Client.compile(
this.broken_project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
describe('from code to pdf', function() {
it('should return a 404 response', function(done) {
return Client.syncFromCode(
this.broken_project_id,
'main.tex',
3,
5,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
describe('from pdf to code', function() {
it('should return a 404 response', function(done) {
return Client.syncFromPdf(
this.broken_project_id,
1,
100,
200,
(error, body) => {
if (error != null) {
throw error
}
expect(body).to.equal('Not Found')
return done()
}
)
})
})
})
})

View File

@@ -0,0 +1,62 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const ClsiApp = require('./helpers/ClsiApp')
describe('Timed out compile', function() {
before(function(done) {
this.request = {
options: {
timeout: 10 // seconds
},
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\begin{document}
\\def\\x{Hello!\\par\\x}
\\x
\\end{document}\
`
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
it('should return a timeout error', function() {
return this.body.compile.error.should.equal('container timed out')
})
it('should return a timedout status', function() {
return this.body.compile.status.should.equal('timedout')
})
return it('should return the log output file name', function() {
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
return outputFilePaths.should.include('output.log')
})
})

View File

@@ -0,0 +1,372 @@
/* eslint-disable
no-path-concat,
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
require('chai').should()
const sinon = require('sinon')
const ClsiApp = require('./helpers/ClsiApp')
const host = 'localhost'
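// Minimal static file server standing in for a remote image host; getFile is a no-op hook the tests spy on to detect downloads.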
const Server = {
run() {
const express = require('express')
const app = express()
const staticServer = express.static(__dirname + '/../fixtures/')
app.get('/:random_id/*', (req, res, next) => {
this.getFile(req.url)
req.url = `/${req.params[0]}`
return staticServer(req, res, next)
})
return app.listen(31415, host)
},
getFile() {},
randomId() {
return Math.random()
.toString(16)
.slice(2)
}
}
Server.run()
describe('Url Caching', function() {
describe('Downloading an image for the first time', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
{
path: 'lion.png',
url: `http://${host}:31415/${this.file}`
}
]
}
sinon.spy(Server, 'getFile')
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image', function() {
return Server.getFile.calledWith(`/${this.file}`).should.equal(true)
})
})
describe('When an image is in the cache and the last modified date is unchanged', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: Date.now()
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
after(function() {
return Server.getFile.restore()
})
return it('should not download the image again', function() {
return Server.getFile.called.should.equal(false)
})
})
describe('When an image is in the cache and the last modified date is advanced', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
this.image_resource.modified = new Date(this.last_modified + 3000)
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
describe('When an image is in the cache and the last modified date is further in the past', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
this.image_resource.modified = new Date(this.last_modified - 3000)
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should not download the image again', function() {
return Server.getFile.called.should.equal(false)
})
})
describe('When an image is in the cache and the last modified date is not specified', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
sinon.spy(Server, 'getFile')
delete this.image_resource.modified
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
}
)
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
return describe('After clearing the cache', function() {
before(function(done) {
this.project_id = Client.randomId()
this.file = `${Server.randomId()}/lion.png`
this.request = {
resources: [
{
path: 'main.tex',
content: `\
\\documentclass{article}
\\usepackage{graphicx}
\\begin{document}
\\includegraphics{lion.png}
\\end{document}\
`
},
(this.image_resource = {
path: 'lion.png',
url: `http://${host}:31415/${this.file}`,
modified: (this.last_modified = Date.now())
})
]
}
return Client.compile(this.project_id, this.request, error => {
if (error != null) {
throw error
}
return Client.clearCache(this.project_id, (error, res, body) => {
if (error != null) {
throw error
}
sinon.spy(Server, 'getFile')
return Client.compile(
this.project_id,
this.request,
(error1, res1, body1) => {
this.error = error1
this.res = res1
this.body = body1
return done()
}
)
})
})
})
afterEach(function() {
return Server.getFile.restore()
})
return it('should download the image again', function() {
return Server.getFile.called.should.equal(true)
})
})
})

View File

@@ -0,0 +1,72 @@
/* eslint-disable
no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
const Client = require('./helpers/Client')
const request = require('request')
require('chai').should()
const { expect } = require('chai')
const path = require('path')
const fs = require('fs')
const ClsiApp = require('./helpers/ClsiApp')
describe('Wordcount', function() {
before(function(done) {
this.request = {
resources: [
{
path: 'main.tex',
content: fs.readFileSync(
path.join(__dirname, '../fixtures/naugty_strings.txt'),
'utf-8'
)
}
]
}
this.project_id = Client.randomId()
return ClsiApp.ensureRunning(() => {
return Client.compile(
this.project_id,
this.request,
(error, res, body) => {
this.error = error
this.res = res
this.body = body
return done()
}
)
})
})
return describe('wordcount file', function() {
return it('should return wordcount info', function(done) {
return Client.wordcount(this.project_id, 'main.tex', (error, result) => {
if (error != null) {
throw error
}
expect(result).to.deep.equal({
texcount: {
encode: 'utf8',
textWords: 2281,
headWords: 2,
outside: 0,
headers: 2,
elements: 0,
mathInline: 6,
mathDisplay: 0,
errors: 0,
messages: ''
}
})
return done()
})
})
})
})

Some files were not shown because too many files have changed in this diff.