Compare commits
750 Commits
10  .dockerignore  Normal file
@@ -0,0 +1,10 @@
node_modules/*
gitrev
.git
.gitignore
.npm
.nvmrc
nodemon.json
cache/
compiles/
db/
64  .eslintrc  Normal file
@@ -0,0 +1,64 @@
// this file was auto-generated, do not edit it directly.
// instead run bin/update_build_scripts from
// https://github.com/sharelatex/sharelatex-dev-environment
{
  "extends": [
    "standard",
    "prettier",
    "prettier/standard"
  ],
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "plugins": [
    "mocha",
    "chai-expect",
    "chai-friendly"
  ],
  "env": {
    "node": true,
    "mocha": true
  },
  "rules": {
    // Swap the no-unused-expressions rule with a more chai-friendly one
    "no-unused-expressions": 0,
    "chai-friendly/no-unused-expressions": "error"
  },
  "overrides": [
    {
      // Test specific rules
      "files": ["test/**/*.js"],
      "globals": {
        "expect": true
      },
      "rules": {
        // mocha-specific rules
        "mocha/handle-done-callback": "error",
        "mocha/no-exclusive-tests": "error",
        "mocha/no-global-tests": "error",
        "mocha/no-identical-title": "error",
        "mocha/no-nested-tests": "error",
        "mocha/no-pending-tests": "error",
        "mocha/no-skipped-tests": "error",
        "mocha/no-mocha-arrows": "error",

        // chai-specific rules
        "chai-expect/missing-assertion": "error",
        "chai-expect/terminating-properties": "error",

        // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests.
        // we don't enforce this at the top-level - just in tests to manage `this` scope
        // based on mocha's context mechanism
        "mocha/prefer-arrow-callback": "error"
      }
    },
    {
      // Backend specific rules
      "files": ["app/**/*.js", "app.js", "index.js"],
      "rules": {
        // don't allow console.log in backend code
        "no-console": "error"
      }
    }
  ]
}
38  .github/ISSUE_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,38 @@
<!-- BUG REPORT TEMPLATE -->

## Steps to Reproduce
<!-- Describe the steps leading up to when / where you found the bug. -->
<!-- Screenshots may be helpful here. -->

1.
2.
3.

## Expected Behaviour
<!-- What should have happened when you completed the steps above? -->

## Observed Behaviour
<!-- What actually happened when you completed the steps above? -->
<!-- Screenshots may be helpful here. -->

## Context
<!-- How has this issue affected you? What were you trying to accomplish? -->

## Technical Info
<!-- Provide any technical details that may be applicable (or N/A if not applicable). -->

* URL:
* Browser Name and version:
* Operating System and version (desktop or mobile):
* Signed in as:
* Project and/or file:

## Analysis
<!--- Optionally, document investigation of / suggest a fix for the bug, e.g. 'comes from this line / commit' -->

## Who Needs to Know?
<!-- If you want to bring this to the attention of particular people, @-mention them below. -->
<!-- If a user reported this bug and should be notified when it is fixed, provide the Front conversation link. -->

-
-
48  .github/PULL_REQUEST_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,48 @@
<!-- ** This is an Overleaf public repository ** -->

<!-- Please review https://github.com/overleaf/overleaf/blob/master/CONTRIBUTING.md for guidance on what is expected of a contribution. -->

### Description

#### Screenshots

#### Related Issues / PRs

### Review

#### Potential Impact

#### Manual Testing Performed

- [ ]
- [ ]

#### Accessibility

### Deployment

#### Deployment Checklist

- [ ] Update documentation not included in the PR (if any)
- [ ]

#### Metrics and Monitoring

#### Who Needs to Know?
23  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,23 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

    pull-request-branch-name:
      # Separate sections of the branch name with a hyphen
      # Docker images use the branch name and do not support slashes in tags
      # https://github.com/overleaf/google-ops/issues/822
      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
      separator: "-"

    # Block informal upgrades -- security upgrades use a separate queue.
    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
    open-pull-requests-limit: 0

    # currently assign team-magma to all dependabot PRs - this may change in
    # future if we reorganise teams
    labels:
      - "dependencies"
      - "Team-Magma"
12  .gitignore  vendored
@@ -1,16 +1,16 @@
 **.swp
 node_modules
-app/js
-test/unit/js
-test/smoke/js
-test/acceptance/js
 test/acceptance/fixtures/tmp
 compiles
-app.js
 .DS_Store
 *~
 cache
 .vagrant
 db.sqlite
+db.sqlite-wal
+db.sqlite-shm
 config/*
-bin/synctex
+npm-debug.log
+
+# managed by dev-environment$ bin/update_build_scripts
+.npmrc
7  .prettierrc  Normal file
@@ -0,0 +1,7 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment
{
  "semi": false,
  "singleQuote": true
}
18  .travis.yml
@@ -1,18 +0,0 @@
language: node_js

node_js:
  - "0.10"

before_install:
  - npm install -g grunt-cli

install:
  - npm install
  - grunt install

script:
  - grunt test:unit

services:
  - redis-server
  - mongodb
35  .viminfo  Normal file
@@ -0,0 +1,35 @@
# This viminfo file was generated by Vim 7.4.
# You may edit it if you're careful!

# Value of 'encoding' when this file was written
*encoding=latin1


# hlsearch on (H) or off (h):
~h
# Command Line History (newest to oldest):
:x

# Search String History (newest to oldest):

# Expression History (newest to oldest):

# Input Line History (newest to oldest):

# Input Line History (newest to oldest):

# Registers:

# File marks:
'0  1  0  ~/hello

# Jumplist (newest first):
-'  1  0  ~/hello

# History of marks within files (newest to oldest):

> ~/hello
	"	1	0
	^	1	1
	.	1	0
	+	1	0
28  Dockerfile  Normal file
@@ -0,0 +1,28 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

FROM node:10.22.1 as base

WORKDIR /app
COPY install_deps.sh /app
RUN chmod 0755 ./install_deps.sh && ./install_deps.sh
ENTRYPOINT ["/bin/sh", "entrypoint.sh"]
COPY entrypoint.sh /app

FROM base as app

#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm ci --quiet

COPY . /app

FROM base

COPY --from=app /app /app
RUN mkdir -p cache compiles db \
    && chown node:node cache compiles db

CMD ["node", "--expose-gc", "app.js"]
@@ -1,97 +0,0 @@
spawn = require("child_process").spawn

module.exports = (grunt) ->
  grunt.initConfig
    coffee:
      app_src:
        expand: true,
        flatten: true,
        cwd: "app"
        src: ['coffee/*.coffee'],
        dest: 'app/js/',
        ext: '.js'

      app:
        src: "app.coffee"
        dest: "app.js"

      unit_tests:
        expand: true
        cwd: "test/unit/coffee"
        src: ["**/*.coffee"]
        dest: "test/unit/js/"
        ext: ".js"

      acceptance_tests:
        expand: true
        cwd: "test/acceptance/coffee"
        src: ["**/*.coffee"]
        dest: "test/acceptance/js/"
        ext: ".js"

      smoke_tests:
        expand: true
        cwd: "test/smoke/coffee"
        src: ["**/*.coffee"]
        dest: "test/smoke/js"
        ext: ".js"

    clean:
      app: ["app/js/"]
      unit_tests: ["test/unit/js"]
      acceptance_tests: ["test/acceptance/js"]
      smoke_tests: ["test/smoke/js"]

    execute:
      app:
        src: "app.js"

    mochaTest:
      unit:
        options:
          reporter: "spec"
        src: ["test/unit/js/**/*.js"]
      acceptance:
        options:
          reporter: "spec"
          timeout: 40000
          grep: grunt.option("grep")
        src: ["test/acceptance/js/**/*.js"]
      smoke:
        options:
          reported: "spec"
          timeout: 10000
        src: ["test/smoke/js/**/*.js"]

  grunt.loadNpmTasks 'grunt-contrib-coffee'
  grunt.loadNpmTasks 'grunt-contrib-clean'
  grunt.loadNpmTasks 'grunt-mocha-test'
  grunt.loadNpmTasks 'grunt-shell'
  grunt.loadNpmTasks 'grunt-execute'
  grunt.loadNpmTasks 'grunt-bunyan'

  grunt.registerTask 'compile:bin', () ->
    callback = @async()
    proc = spawn "cc", [
      "-o", "bin/synctex", "-Isrc/synctex",
      "src/synctex.c", "src/synctex/synctex_parser.c", "src/synctex/synctex_parser_utils.c", "-lz"
    ], stdio: "inherit"
    proc.on "close", callback

  grunt.registerTask 'compile:app', ['clean:app', 'coffee:app', 'coffee:app_src', 'coffee:smoke_tests', 'compile:bin']
  grunt.registerTask 'run', ['compile:app', 'bunyan', 'execute']

  grunt.registerTask 'compile:unit_tests', ['clean:unit_tests', 'coffee:unit_tests']
  grunt.registerTask 'test:unit', ['compile:app', 'compile:unit_tests', 'mochaTest:unit']

  grunt.registerTask 'compile:acceptance_tests', ['clean:acceptance_tests', 'coffee:acceptance_tests']
  grunt.registerTask 'test:acceptance', ['compile:acceptance_tests', 'mochaTest:acceptance']

  grunt.registerTask 'compile:smoke_tests', ['clean:smoke_tests', 'coffee:smoke_tests']
  grunt.registerTask 'test:smoke', ['compile:smoke_tests', 'mochaTest:smoke']

  grunt.registerTask 'install', 'compile:app'

  grunt.registerTask 'default', ['run']
88  Makefile  Normal file
@@ -0,0 +1,88 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = clsi
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker-compose ${DOCKER_COMPOSE_FLAGS}

DOCKER_COMPOSE_TEST_ACCEPTANCE = \
	COMPOSE_PROJECT_NAME=test_acceptance_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

DOCKER_COMPOSE_TEST_UNIT = \
	COMPOSE_PROJECT_NAME=test_unit_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE)

clean:
	docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint

test: format lint test_unit test_acceptance

test_unit:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
	$(MAKE) test_unit_clean
endif

test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif

test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
	$(MAKE) test_acceptance_clean

test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
	$(MAKE) test_acceptance_clean

test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif

test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif

test_clean: test_acceptance_clean
test_acceptance_clean:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0

test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif

build:
	docker build --pull --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		.

tar:
	$(DOCKER_COMPOSE) up tar

publish:
	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

.PHONY: clean test test_unit test_acceptance test_clean build publish
51  README.md
@@ -1,30 +1,55 @@
-clsi-sharelatex
+overleaf/clsi
 ===============
 
+**Note:** Original repo here: https://github.com/overleaf/clsi
+
 A web api for compiling LaTeX documents in the cloud
 
-[](https://travis-ci.org/sharelatex/clsi-sharelatex)
+The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
+
+* TCP/3009 - the RESTful interface
+* TCP/3048 - reports load information
+* TCP/3049 - HTTP interface to control the CLSI service
+
+These defaults can be modified in `config/settings.defaults.coffee`.
+
+The provided `Dockerfile` builds a docker image which has the docker command line tools installed. The configuration in `docker-compose-config.yml` mounts the docker socket, in order that the CLSI container can talk to the docker host it is running in. This allows it to spin up `sibling containers` running an image with a TeX distribution installed to perform the actual compiles.
+
+The CLSI can be configured through the following environment variables:
+
+* `DOCKER_RUNNER` - Set to true to use sibling containers
+* `SYNCTEX_BIN_HOST_PATH` - Path to SyncTeX binary
+* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
+* `SQLITE_PATH` - Path to SQLite database
+* `TEXLIVE_IMAGE` - The TEXLIVE docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
+* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TEXLIVE image. Defaults to `tex`
+* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the docker image e.g. `gcr.io/overleaf-ops`
+* `FILESTORE_DOMAIN_OVERRIDE` - The url for the filestore service e.g. `http://$FILESTORE_HOST:3009`
+* `STATSD_HOST` - The address of the Statsd service (used by the metrics module)
+* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
+* `SMOKE_TEST` - Whether to run smoke tests
+
 Installation
 ------------
 
-The CLSI can be installed and set up as part of the entire [ShareLaTeX stack](https://github.com/sharelatex/sharelatex) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first checkout this repository:
+The CLSI can be installed and set up as part of the entire [Overleaf stack](https://github.com/overleaf/overleaf) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first checkout this repository:
 
-    $ git clone git@github.com:sharelatex/clsi-sharelatex.git
+    $ git clone git@github.com:overleaf/clsi.git
 
-Then install the require npm modules:
+Then install the required npm modules and run:
 
     $ npm install
+    $ npm start
 
-Then compile the coffee script source files:
-
-    $ grunt install
-
-Finally, (after configuring your local database - see the Config section), run the CLSI service:
-
-    $ grunt run
-
 The CLSI should then be running at http://localhost:3013.
 
+**Note:** to install texlive-full on ubuntu:
+
+    $ sudo apt install texlive-full
+
+Possible REST API clients to test are:
+
+* Postman
+* Insomnia
+
 Config
 ------
@@ -92,4 +117,4 @@ License
 
 The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
 
-Copyright (c) ShareLaTeX, 2014.
+Copyright (c) Overleaf, 2014-2019.
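For orientation, here is a minimal sketch of a compile request against the RESTful interface described above. The route, default port, project-id pattern and response shape come from `app.js` and `CompileController` in this diff; the request body schema is an assumption (the `RequestParser` module is not part of this diff), so the field names below are illustrative only.

```js
// Hedged sketch: POST /project/:project_id/compile (route from app.js).
// The body schema is assumed -- RequestParser is not shown in this diff.
const http = require('http')

const body = JSON.stringify({
  compile: {
    // rootResourcePath / compiler / timeout / resources mirror the fields
    // CompileManager.doCompile reads from the parsed request (assumption)
    rootResourcePath: 'main.tex',
    options: { compiler: 'pdflatex', timeout: 40 },
    resources: [
      {
        path: 'main.tex',
        content: '\\documentclass{article}\\begin{document}Hello\\end{document}'
      }
    ]
  }
})

const req = http.request(
  {
    host: 'localhost',
    port: 3013, // standalone default from app.js
    path: '/project/demo-project/compile', // project_id must match /^[a-zA-Z0-9_-]+$/
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(body)
    }
  },
  (res) => {
    let data = ''
    res.on('data', (chunk) => (data += chunk))
    // Response shape from CompileController:
    // { compile: { status, error, outputFiles: [{ url, type }] } }
    res.on('end', () => console.log(res.statusCode, data))
  }
)
req.on('error', (err) => console.error('compile request failed:', err.message))
req.end(body)
```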
72  app.coffee
@@ -1,72 +0,0 @@
CompileController = require "./app/js/CompileController"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
logger.initialize("clsi")
smokeTest = require "smoke-test-sharelatex"

Metrics = require "metrics-sharelatex"
Metrics.initialize("clsi")
Metrics.open_sockets.monitor(logger)

ProjectPersistenceManager = require "./app/js/ProjectPersistenceManager"

require("./app/js/db").sync()

express = require "express"
bodyParser = require "body-parser"
app = express()

app.use Metrics.http.monitor(logger)

# Compile requests can take longer than the default two
# minutes (including file download time), so bump up the
# timeout a bit.
TIMEOUT = threeMinutes = 3 * 60 * 1000
app.use (req, res, next) ->
  req.setTimeout TIMEOUT
  res.setTimeout TIMEOUT
  next()

app.post "/project/:project_id/compile", bodyParser.json(limit: "5mb"), CompileController.compile
app.delete "/project/:project_id", CompileController.clearCache

app.get "/project/:project_id/sync/code", CompileController.syncFromCode
app.get "/project/:project_id/sync/pdf", CompileController.syncFromPdf

staticServer = express.static(Settings.path.compilesDir)
app.get "/project/:project_id/output/*", (req, res, next) ->
  req.url = "/#{req.params.project_id}/#{req.params[0]}"
  staticServer(req, res, next)

app.get "/status", (req, res, next) ->
  res.send "CLSI is alive\n"

resCacher =
  contentType:(@setContentType)->
  send:(@code, @body)->

  #default the server to be down
  code:500
  body:{}
  setContentType:"application/json"

if Settings.smokeTest
  do runSmokeTest = ->
    logger.log("running smoke tests")
    smokeTest.run(require.resolve(__dirname + "/test/smoke/js/SmokeTests.js"))({}, resCacher)
    setTimeout(runSmokeTest, 20 * 1000)

app.get "/health_check", (req, res)->
  res.contentType(resCacher?.setContentType)
  res.send resCacher?.code, resCacher?.body

app.use (error, req, res, next) ->
  logger.error err: error, "server error"
  res.send 500

app.listen port = (Settings.internal?.clsi?.port or 3013), host = (Settings.internal?.clsi?.host or "localhost"), (error) ->
  logger.log "CLSI listening on #{host}:#{port}"

setInterval () ->
  ProjectPersistenceManager.clearExpiredProjects()
, tenMinutes = 10 * 60 * 1000
350  app.js  Normal file
@@ -0,0 +1,350 @@
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const tenMinutes = 10 * 60 * 1000
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')

const CompileController = require('./app/js/CompileController')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
logger.initialize('clsi')
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}

const smokeTest = require('./test/smoke/js/SmokeTests')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')

const Path = require('path')

Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)

const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager')
const OutputCacheManager = require('./app/js/OutputCacheManager')

require('./app/js/db').sync()

const express = require('express')
const bodyParser = require('body-parser')
const app = express()

Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))

// Compile requests can take longer than the default two
// minutes (including file download time), so bump up the
// timeout a bit.
const TIMEOUT = 10 * 60 * 1000
app.use(function (req, res, next) {
  req.setTimeout(TIMEOUT)
  res.setTimeout(TIMEOUT)
  res.removeHeader('X-Powered-By')
  return next()
})

app.param('project_id', function (req, res, next, projectId) {
  if (projectId != null ? projectId.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid project id'))
  }
})

app.param('user_id', function (req, res, next, userId) {
  if (userId != null ? userId.match(/^[0-9a-f]{24}$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid user id'))
  }
})

app.param('build_id', function (req, res, next, buildId) {
  if (
    buildId != null ? buildId.match(OutputCacheManager.BUILD_REGEX) : undefined
  ) {
    return next()
  } else {
    return next(new Error(`invalid build id ${buildId}`))
  }
})

app.post(
  '/project/:project_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)

app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/project/:project_id/status', CompileController.status)

// Per-user containers
app.post(
  '/project/:project_id/user/:user_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post(
  '/project/:project_id/user/:user_id/compile/stop',
  CompileController.stopCompile
)
app.delete('/project/:project_id/user/:user_id', CompileController.clearCache)

app.get(
  '/project/:project_id/user/:user_id/sync/code',
  CompileController.syncFromCode
)
app.get(
  '/project/:project_id/user/:user_id/sync/pdf',
  CompileController.syncFromPdf
)
app.get(
  '/project/:project_id/user/:user_id/wordcount',
  CompileController.wordcount
)

const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')

// create a static server which does not allow access to any symlinks
// avoids possible mismatch of root directory between middleware check
// and serving the files
const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
  setHeaders(res, path, stat) {
    if (Path.basename(path) === 'output.pdf') {
      // Calculate an etag in the same way as nginx
      // https://github.com/tj/send/issues/65
      const etag = (path, stat) =>
        `"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
        '-' +
        Number(stat.size).toString(16) +
        '"'
      res.set('Etag', etag(path, stat))
    }
    return res.set('Content-Type', ContentTypeMapper.map(path))
  }
})

app.get(
  '/project/:project_id/user/:user_id/build/:build_id/output/*',
  function (req, res, next) {
    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
    req.url =
      `/${req.params.project_id}-${req.params.user_id}/` +
      OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
    return staticServer(req, res, next)
  }
)

app.get('/project/:project_id/build/:build_id/output/*', function (
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
})

app.get('/project/:project_id/user/:user_id/output/*', function (
  req,
  res,
  next
) {
  // for specific user get the path to the top level file
  req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
  return staticServer(req, res, next)
})

app.get('/project/:project_id/output/*', function (req, res, next) {
  if (
    (req.query != null ? req.query.build : undefined) != null &&
    req.query.build.match(OutputCacheManager.BUILD_REGEX)
  ) {
    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
    req.url =
      `/${req.params.project_id}/` +
      OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
  } else {
    req.url = `/${req.params.project_id}/${req.params[0]}`
  }
  return staticServer(req, res, next)
})

app.get('/oops', function (req, res, next) {
  logger.error({ err: 'hello' }, 'test error')
  return res.send('error\n')
})

app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))

Settings.processTooOld = false
if (Settings.processLifespanLimitMs) {
  Settings.processLifespanLimitMs +=
    Settings.processLifespanLimitMs * (Math.random() / 10)
  logger.info(
    'Lifespan limited to ',
    Date.now() + Settings.processLifespanLimitMs
  )

  setTimeout(() => {
    logger.log('shutting down, process is too old')
    Settings.processTooOld = true
  }, Settings.processLifespanLimitMs)
}

function runSmokeTest() {
  if (Settings.processTooOld) return
  logger.log('running smoke tests')
  smokeTest.triggerRun((err) => {
    if (err) logger.error({ err }, 'smoke tests failed')
    setTimeout(runSmokeTest, 30 * 1000)
  })
}
if (Settings.smokeTest) {
  runSmokeTest()
}

app.get('/health_check', function (req, res) {
  if (Settings.processTooOld) {
    return res.status(500).json({ processTooOld: true })
  }
  smokeTest.sendLastResult(res)
})

app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res))

app.use(function (error, req, res, next) {
  if (error instanceof Errors.NotFoundError) {
    logger.log({ err: error, url: req.url }, 'not found error')
    return res.sendStatus(404)
  } else if (error.code === 'EPIPE') {
    // inspect container returns EPIPE when shutting down
    return res.sendStatus(503) // send 503 Unavailable response
  } else {
    logger.error({ err: error, url: req.url }, 'server error')
    return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
  }
})

const net = require('net')
const os = require('os')

let STATE = 'up'

const loadTcpServer = net.createServer(function (socket) {
  socket.on('error', function (err) {
    if (err.code === 'ECONNRESET') {
      // this always comes up, we don't know why
      return
    }
    logger.err({ err }, 'error with socket on load check')
    return socket.destroy()
  })

  if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
    let availableWorkingCpus
    const currentLoad = os.loadavg()[0]

    // staging clis's have 1 cpu core only
    if (os.cpus().length === 1) {
      availableWorkingCpus = 1
    } else {
      availableWorkingCpus = os.cpus().length - 1
    }

    const freeLoad = availableWorkingCpus - currentLoad
    let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
    if (freeLoadPercentage <= 0) {
      freeLoadPercentage = 1 // when its 0 the server is set to drain and will move projects to different servers
    }
    socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII')
    return socket.end()
  } else {
    socket.write(`${STATE}\n`, 'ASCII')
    return socket.end()
  }
})

const loadHttpServer = express()

loadHttpServer.post('/state/up', function (req, res, next) {
  STATE = 'up'
  logger.info('getting message to set server to down')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/down', function (req, res, next) {
  STATE = 'down'
  logger.info('getting message to set server to down')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/maint', function (req, res, next) {
  STATE = 'maint'
  logger.info('getting message to set server to maint')
  return res.sendStatus(204)
})

const port =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    (x) => x.port
  ) || 3013
const host =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    (x1) => x1.host
  ) || 'localhost'

const loadTcpPort = Settings.internal.load_balancer_agent.load_port
const loadHttpPort = Settings.internal.load_balancer_agent.local_port

if (!module.parent) {
  // Called directly
  app.listen(port, host, (error) => {
    if (error) {
      logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
    } else {
      logger.info(`CLSI starting up, listening on ${host}:${port}`)
    }
  })

  loadTcpServer.listen(loadTcpPort, host, function (error) {
    if (error != null) {
      throw error
    }
    return logger.info(`Load tcp agent listening on load port ${loadTcpPort}`)
  })

  loadHttpServer.listen(loadHttpPort, host, function (error) {
    if (error != null) {
      throw error
    }
    return logger.info(`Load http agent listening on load port ${loadHttpPort}`)
  })
}

module.exports = app

setInterval(() => {
  ProjectPersistenceManager.refreshExpiryTimeout()
  ProjectPersistenceManager.clearExpiredProjects()
}, tenMinutes)

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
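The TCP load agent at the end of `app.js` writes a single status line on connect (`up, <free-load>%` when reporting load, otherwise the bare state) and then closes the socket. A minimal sketch of probing it, assuming the default load port TCP/3048 from the README:

```js
// Hedged sketch: probe the load-balancer agent defined in app.js above.
// Port 3048 ("reports load information") is the README default; the real
// value comes from Settings.internal.load_balancer_agent.load_port.
const net = require('net')

const socket = net.connect({ host: 'localhost', port: 3048 })
let reply = ''
socket.on('data', (chunk) => (reply += chunk))
// the agent ends the connection itself after writing one line
socket.on('end', () => console.log('CLSI load state:', reply.trim()))
socket.on('error', (err) => console.error('load check failed:', err.message))
```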
@@ -1,12 +0,0 @@
spawn = require("child_process").spawn
logger = require "logger-sharelatex"

module.exports = CommandRunner =
  run: (project_id, command, directory, timeout, callback = (error) ->) ->
    command = (arg.replace('$COMPILE_DIR', directory) for arg in command)
    logger.log project_id: project_id, command: command, directory: directory, "running command"
    logger.warn "timeouts and sandboxing are not enabled with CommandRunner"

    proc = spawn command[0], command.slice(1), stdio: "inherit", cwd: directory
    proc.on "close", () ->
      callback()
@@ -1,67 +0,0 @@
RequestParser = require "./RequestParser"
CompileManager = require "./CompileManager"
Settings = require "settings-sharelatex"
Metrics = require "./Metrics"
ProjectPersistenceManager = require "./ProjectPersistenceManager"
logger = require "logger-sharelatex"

module.exports = CompileController =
  compile: (req, res, next = (error) ->) ->
    timer = new Metrics.Timer("compile-request")
    RequestParser.parse req.body, (error, request) ->
      return next(error) if error?
      request.project_id = req.params.project_id
      ProjectPersistenceManager.markProjectAsJustAccessed request.project_id, (error) ->
        return next(error) if error?
        CompileManager.doCompile request, (error, outputFiles = []) ->
          if error?
            logger.error err: error, project_id: request.project_id, "error running compile"
            if error.timedout
              status = "timedout"
            else
              status = "error"
              code = 500
          else
            status = "failure"
            for file in outputFiles
              if file.path?.match(/output\.pdf$/)
                status = "success"

          timer.done()
          res.send (code or 200), {
            compile:
              status: status
              error: error?.message or error
              outputFiles: outputFiles.map (file) ->
                url: "#{Settings.apis.clsi.url}/project/#{request.project_id}/output/#{file.path}"
                type: file.type
          }

  clearCache: (req, res, next = (error) ->) ->
    ProjectPersistenceManager.clearProject req.params.project_id, (error) ->
      return next(error) if error?
      res.send 204 # No content

  syncFromCode: (req, res, next = (error) ->) ->
    file = req.query.file
    line = parseInt(req.query.line, 10)
    column = parseInt(req.query.column, 10)
    project_id = req.params.project_id

    CompileManager.syncFromCode project_id, file, line, column, (error, pdfPositions) ->
      return next(error) if error?
      res.send JSON.stringify {
        pdf: pdfPositions
      }

  syncFromPdf: (req, res, next = (error) ->) ->
    page = parseInt(req.query.page, 10)
    h = parseFloat(req.query.h)
    v = parseFloat(req.query.v)
    project_id = req.params.project_id

    CompileManager.syncFromPdf project_id, page, h, v, (error, codePositions) ->
      return next(error) if error?
      res.send JSON.stringify {
        code: codePositions
      }
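Note: for reference, a successful response from this old compile endpoint has roughly the following shape. The field names and URL pattern come from the code above; the concrete values are made up:

const exampleResponse = {
  compile: {
    status: 'success', // 'failure', 'error' or 'timedout' in the other branches
    error: null,
    outputFiles: [
      {
        url: 'http://clsi.example.com/project/project-123/output/output.pdf',
        type: 'pdf'
      }
    ]
  }
}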
@@ -1,108 +0,0 @@
ResourceWriter = require "./ResourceWriter"
LatexRunner = require "./LatexRunner"
OutputFileFinder = require "./OutputFileFinder"
Settings = require("settings-sharelatex")
Path = require "path"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
child_process = require "child_process"

module.exports = CompileManager =
  doCompile: (request, callback = (error, outputFiles) ->) ->
    compileDir = Path.join(Settings.path.compilesDir, request.project_id)

    timer = new Metrics.Timer("write-to-disk")
    logger.log project_id: request.project_id, "starting compile"
    ResourceWriter.syncResourcesToDisk request.project_id, request.resources, compileDir, (error) ->
      return callback(error) if error?
      logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "written files to disk"
      timer.done()

      timer = new Metrics.Timer("run-compile")
      Metrics.inc("compiles")
      LatexRunner.runLatex request.project_id, {
        directory: compileDir
        mainFile: request.rootResourcePath
        compiler: request.compiler
        timeout: request.timeout
      }, (error) ->
        return callback(error) if error?
        logger.log project_id: request.project_id, time_taken: Date.now() - timer.start, "done compile"
        timer.done()

        OutputFileFinder.findOutputFiles request.resources, compileDir, (error, outputFiles) ->
          return callback(error) if error?
          callback null, outputFiles

  clearProject: (project_id, _callback = (error) ->) ->
    callback = (error) ->
      _callback(error)
      _callback = () ->

    compileDir = Path.join(Settings.path.compilesDir, project_id)
    proc = child_process.spawn "rm", ["-r", compileDir]

    proc.on "error", callback

    stderr = ""
    proc.stderr.on "data", (chunk) -> stderr += chunk.toString()

    proc.on "close", (code) ->
      if code == 0
        return callback(null)
      else
        return callback(new Error("rm -r #{compileDir} failed: #{stderr}"))

  syncFromCode: (project_id, file_name, line, column, callback = (error, pdfPositions) ->) ->
    # If LaTeX was run in a virtual environment, the file path that synctex expects
    # might not match the file path on the host. The .synctex.gz file however, will be accessed
    # wherever it is on the host.
    base_dir = Settings.path.synctexBaseDir(project_id)
    file_path = base_dir + "/" + file_name
    synctex_path = Path.join(Settings.path.compilesDir, project_id, "output.pdf")
    CompileManager._runSynctex ["code", synctex_path, file_path, line, column], (error, stdout) ->
      return callback(error) if error?
      logger.log project_id: project_id, file_name: file_name, line: line, column: column, stdout: stdout, "synctex code output"
      callback null, CompileManager._parseSynctexFromCodeOutput(stdout)

  syncFromPdf: (project_id, page, h, v, callback = (error, filePositions) ->) ->
    base_dir = Settings.path.synctexBaseDir(project_id)
    synctex_path = Path.join(Settings.path.compilesDir, project_id, "output.pdf")
    CompileManager._runSynctex ["pdf", synctex_path, page, h, v], (error, stdout) ->
      return callback(error) if error?
      logger.log project_id: project_id, page: page, h: h, v: v, stdout: stdout, "synctex pdf output"
      callback null, CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)

  _runSynctex: (args, callback = (error, stdout) ->) ->
    bin_path = Path.resolve(__dirname + "/../../bin/synctex")
    seconds = 1000
    child_process.execFile bin_path, args, timeout: 10 * seconds, (error, stdout, stderr) ->
      return callback(error) if error?
      callback(null, stdout)

  _parseSynctexFromCodeOutput: (output) ->
    results = []
    for line in output.split("\n")
      [node, page, h, v, width, height] = line.split("\t")
      if node == "NODE"
        results.push {
          page: parseInt(page, 10)
          h: parseFloat(h)
          v: parseFloat(v)
          height: parseFloat(height)
          width: parseFloat(width)
        }
    return results

  _parseSynctexFromPdfOutput: (output, base_dir) ->
    results = []
    for line in output.split("\n")
      [node, file_path, line, column] = line.split("\t")
      if node == "NODE"
        file = file_path.slice(base_dir.length + 1)
        results.push {
          file: file
          line: parseInt(line, 10)
          column: parseInt(column, 10)
        }
    return results
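Note: _parseSynctexFromCodeOutput expects tab-separated NODE records on stdout. A self-contained JavaScript sketch of the same parsing, using a made-up sample line:

const stdout = 'NODE\t1\t100.50\t200.25\t345.00\t12.00\n'
const results = []
for (const line of stdout.split('\n')) {
  const [node, page, h, v, width, height] = line.split('\t')
  if (node === 'NODE') {
    results.push({
      page: parseInt(page, 10),
      h: parseFloat(h),
      v: parseFloat(v),
      height: parseFloat(height),
      width: parseFloat(width)
    })
  }
}
// results: [{ page: 1, h: 100.5, v: 200.25, height: 12, width: 345 }]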
@@ -1,57 +0,0 @@
Path = require "path"
Settings = require "settings-sharelatex"
logger = require "logger-sharelatex"
Metrics = require "./Metrics"
CommandRunner = require(Settings.clsi?.commandRunner or "./CommandRunner")

module.exports = LatexRunner =
  runLatex: (project_id, options, callback = (error) ->) ->
    {directory, mainFile, compiler, timeout} = options
    compiler ||= "pdflatex"
    timeout ||= 60000 # milliseconds

    logger.log directory: directory, compiler: compiler, timeout: timeout, mainFile: mainFile, "starting compile"

    # We want to run latexmk on the tex file which we will automatically
    # generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, ".tex")

    if compiler == "pdflatex"
      command = LatexRunner._pdflatexCommand mainFile
    else if compiler == "latex"
      command = LatexRunner._latexCommand mainFile
    else if compiler == "xelatex"
      command = LatexRunner._xelatexCommand mainFile
    else if compiler == "lualatex"
      command = LatexRunner._lualatexCommand mainFile
    else
      return callback new Error("unknown compiler: #{compiler}")

    CommandRunner.run project_id, command, directory, timeout, callback

  _latexmkBaseCommand: [ "latexmk", "-cd", "-f", "-jobname=output", "-auxdir=$COMPILE_DIR", "-outdir=$COMPILE_DIR"]

  _pdflatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdf", "-e", "$pdflatex='pdflatex -synctex=1 -interaction=batchmode %O %S'",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _latexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdfdvi", "-e", "$latex='latex -synctex=1 -interaction=batchmode %O %S'",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _xelatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-xelatex", "-e", "$pdflatex='xelatex -synctex=1 -interaction=batchmode %O %S'",
      Path.join("$COMPILE_DIR", mainFile)
    ]

  _lualatexCommand: (mainFile) ->
    LatexRunner._latexmkBaseCommand.concat [
      "-pdf", "-e", "$pdflatex='lualatex -synctex=1 -interaction=batchmode %O %S'",
      Path.join("$COMPILE_DIR", mainFile)
    ]
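Note: for a mainFile of main.tex, _pdflatexCommand above expands to this argv ($COMPILE_DIR is substituted later by the command runner):

const command = [
  'latexmk', '-cd', '-f', '-jobname=output',
  '-auxdir=$COMPILE_DIR', '-outdir=$COMPILE_DIR',
  '-pdf', '-e', "$pdflatex='pdflatex -synctex=1 -interaction=batchmode %O %S'",
  '$COMPILE_DIR/main.tex'
]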
@@ -1,2 +0,0 @@
module.exports = require "metrics-sharelatex"
@@ -1,58 +0,0 @@
async = require "async"
fs = require "fs"
Path = require "path"
wrench = require "wrench"

module.exports = OutputFileFinder =
  findOutputFiles: (resources, directory, callback = (error, outputFiles) ->) ->
    incomingResources = {}
    for resource in resources
      incomingResources[resource.path] = true

    OutputFileFinder._getAllFiles directory, (error, allFiles) ->
      jobs = []
      outputFiles = []
      for file in allFiles
        do (file) ->
          jobs.push (callback) ->
            if incomingResources[file.path]
              return callback()
            else
              OutputFileFinder._isDirectory Path.join(directory, file.path), (error, directory) ->
                return callback(error) if error?
                if !directory
                  outputFiles.push file
                callback()

      async.series jobs, (error) ->
        return callback(error) if error?
        callback null, outputFiles

  _isDirectory: (path, callback = (error, directory) ->) ->
    fs.stat path, (error, stat) ->
      callback error, stat?.isDirectory()

  _getAllFiles: (directory, _callback = (error, outputFiles) ->) ->
    callback = (error, outputFiles) ->
      _callback(error, outputFiles)
      _callback = () ->

    outputFiles = []

    wrench.readdirRecursive directory, (error, files) =>
      if error?
        if error.code == "ENOENT"
          # Directory doesn't exist, which is not a problem
          return callback(null, [])
        else
          return callback(error)

      # readdirRecursive returns multiple times and finishes with a null response
      if !files?
        return callback(null, outputFiles)

      for file in files
        outputFiles.push
          path: file
          type: file.match(/\.([^\.]+)$/)?[1]
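Note: the comment above documents wrench.readdirRecursive's contract: the callback fires repeatedly with batches of relative paths and a final time with null. A JavaScript sketch of consuming that contract (assuming the behaviour described in the source comment):

const wrench = require('wrench')
const allFiles = []
wrench.readdirRecursive('/tmp/some-directory', (error, files) => {
  if (error) return console.error(error)
  if (files == null) return console.log('done:', allFiles) // null signals completion
  allFiles.push(...files) // one batch of relative paths per invocation
})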
@@ -1,54 +0,0 @@
UrlCache = require "./UrlCache"
CompileManager = require "./CompileManager"
db = require "./db"
async = require "async"
logger = require "logger-sharelatex"

module.exports = ProjectPersistenceManager =
  EXPIRY_TIMEOUT: oneDay = 24 * 60 * 60 * 1000 #ms

  markProjectAsJustAccessed: (project_id, callback = (error) ->) ->
    db.Project.findOrCreate(project_id: project_id)
      .success(
        (project) ->
          project.updateAttributes(lastAccessed: new Date())
            .success(() -> callback())
            .error callback
      )
      .error callback

  clearExpiredProjects: (callback = (error) ->) ->
    ProjectPersistenceManager._findExpiredProjectIds (error, project_ids) ->
      return callback(error) if error?
      logger.log project_ids: project_ids, "clearing expired projects"
      jobs = for project_id in (project_ids or [])
        do (project_id) ->
          (callback) ->
            ProjectPersistenceManager.clearProject project_id, (err) ->
              if err?
                logger.error err: err, project_id: project_id, "error clearing project"
              callback()
      async.series jobs, callback

  clearProject: (project_id, callback = (error) ->) ->
    logger.log project_id: project_id, "clearing project"
    CompileManager.clearProject project_id, (error) ->
      return callback(error) if error?
      UrlCache.clearProject project_id, (error) ->
        return callback(error) if error?
        ProjectPersistenceManager._clearProjectFromDatabase project_id, (error) ->
          return callback(error) if error?
          callback()

  _clearProjectFromDatabase: (project_id, callback = (error) ->) ->
    db.Project.destroy(project_id: project_id)
      .success(() -> callback())
      .error callback

  _findExpiredProjectIds: (callback = (error, project_ids) ->) ->
    db.Project.findAll(where: ["lastAccessed < ?", new Date(Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT)])
      .success(
        (projects) ->
          callback null, projects.map((project) -> project.project_id)
      )
      .error callback
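Note: EXPIRY_TIMEOUT worked through: _findExpiredProjectIds selects projects whose lastAccessed is more than one day in the past.

const EXPIRY_TIMEOUT = 24 * 60 * 60 * 1000 // ms, i.e. one day
const cutoff = new Date(Date.now() - EXPIRY_TIMEOUT)
// the findAll query above is equivalent to: WHERE lastAccessed < cutoff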
@@ -1,74 +0,0 @@
module.exports = RequestParser =
  VALID_COMPILERS: ["pdflatex", "latex", "xelatex", "lualatex"]
  MAX_TIMEOUT: 60

  parse: (body, callback = (error, data) ->) ->
    response = {}

    if !body.compile?
      return callback "top level object should have a compile attribute"

    compile = body.compile
    compile.options ||= {}

    try
      response.compiler = @_parseAttribute "compiler",
        compile.options.compiler,
        validValues: @VALID_COMPILERS
        default: "pdflatex"
        type: "string"
      response.timeout = @_parseAttribute "timeout",
        compile.options.timeout,
        default: RequestParser.MAX_TIMEOUT
        type: "number"

      if response.timeout > RequestParser.MAX_TIMEOUT
        response.timeout = RequestParser.MAX_TIMEOUT
      response.timeout = response.timeout * 1000 # milliseconds

      response.resources = (@_parseResource(resource) for resource in (compile.resources or []))
      response.rootResourcePath = @_parseAttribute "rootResourcePath",
        compile.rootResourcePath,
        default: "main.tex"
        type: "string"
    catch error
      return callback error

    callback null, response

  _parseResource: (resource) ->
    if !resource.path? or typeof resource.path != "string"
      throw "all resources should have a path attribute"

    if resource.modified?
      modified = new Date(resource.modified)
      if isNaN(modified.getTime())
        throw "resource modified date could not be understood: #{resource.modified}"

    if !resource.url? and !resource.content?
      throw "all resources should have either a url or content attribute"
    if resource.content? and typeof resource.content != "string"
      throw "content attribute should be a string"
    if resource.url? and typeof resource.url != "string"
      throw "url attribute should be a string"

    return {
      path: resource.path
      modified: modified
      url: resource.url
      content: resource.content
    }

  _parseAttribute: (name, attribute, options) ->
    if attribute?
      if options.validValues?
        if options.validValues.indexOf(attribute) == -1
          throw "#{name} attribute should be one of: #{options.validValues.join(", ")}"
      if options.type?
        if typeof attribute != options.type
          throw "#{name} attribute should be a #{options.type}"
    else
      return options.default if options.default?
      throw "Default not implemented"
    return attribute
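Note: an illustrative request body accepted by parse above. The field names come from the parser; the values and URLs are made up:

const body = {
  compile: {
    options: {
      compiler: 'pdflatex', // must be one of VALID_COMPILERS
      timeout: 40 // seconds; capped at MAX_TIMEOUT (60), then converted to ms
    },
    rootResourcePath: 'main.tex',
    resources: [
      { path: 'main.tex', content: '\\documentclass{article}...' },
      { path: 'image.png', url: 'http://example.com/image.png', modified: 1577836800000 }
    ]
  }
}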
@@ -1,68 +0,0 @@
UrlCache = require "./UrlCache"
Path = require "path"
fs = require "fs"
async = require "async"
mkdirp = require "mkdirp"
OutputFileFinder = require "./OutputFileFinder"
Metrics = require "./Metrics"

module.exports = ResourceWriter =
  syncResourcesToDisk: (project_id, resources, basePath, callback = (error) ->) ->
    @_removeExtraneousFiles resources, basePath, (error) =>
      return callback(error) if error?
      jobs = for resource in resources
        do (resource) =>
          (callback) => @_writeResourceToDisk(project_id, resource, basePath, callback)
      async.series jobs, callback

  _removeExtraneousFiles: (resources, basePath, _callback = (error) ->) ->
    timer = new Metrics.Timer("unlink-output-files")
    callback = (error) ->
      timer.done()
      _callback(error)

    OutputFileFinder.findOutputFiles resources, basePath, (error, outputFiles) ->
      return callback(error) if error?

      jobs = []
      for file in outputFiles or []
        do (file) ->
          path = file.path
          should_delete = true
          if path.match(/^output\./) or path.match(/\.aux$/)
            should_delete = false
          if path == "output.pdf" or path == "output.dvi" or path == "output.log"
            should_delete = true
          if should_delete
            jobs.push (callback) -> ResourceWriter._deleteFileIfNotDirectory Path.join(basePath, path), callback

      async.series jobs, callback

  _deleteFileIfNotDirectory: (path, callback = (error) ->) ->
    fs.stat path, (error, stat) ->
      return callback(error) if error?
      if stat.isFile()
        fs.unlink path, callback
      else
        callback()

  _writeResourceToDisk: (project_id, resource, basePath, callback = (error) ->) ->
    path = Path.normalize(Path.join(basePath, resource.path))
    if (path.slice(0, basePath.length) != basePath)
      return callback new Error("resource path is outside root directory")

    mkdirp Path.dirname(path), (error) ->
      return callback(error) if error?
      # TODO: Don't overwrite file if it hasn't been modified
      if resource.url?
        UrlCache.downloadUrlToFile(
          project_id,
          resource.url,
          path,
          resource.modified,
          callback
        )
      else
        fs.writeFile path, resource.content, callback
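Note: the should_delete rule above, restated as a standalone JavaScript function with worked examples:

function shouldDelete(path) {
  let should_delete = true
  if (/^output\./.test(path) || /\.aux$/.test(path)) {
    should_delete = false // output.* and *.aux files are normally kept
  }
  if (path === 'output.pdf' || path === 'output.dvi' || path === 'output.log') {
    should_delete = true // but the main outputs are always removed
  }
  return should_delete
}
// shouldDelete('output.aux') === false; shouldDelete('output.pdf') === true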
@@ -1,113 +0,0 @@
db = require("./db")
UrlFetcher = require("./UrlFetcher")
Settings = require("settings-sharelatex")
crypto = require("crypto")
fs = require("fs")
logger = require "logger-sharelatex"
async = require "async"

module.exports = UrlCache =
  downloadUrlToFile: (project_id, url, destPath, lastModified, callback = (error) ->) ->
    UrlCache._ensureUrlIsInCache project_id, url, lastModified, (error, pathToCachedUrl) =>
      return callback(error) if error?
      UrlCache._copyFile(pathToCachedUrl, destPath, callback)

  clearProject: (project_id, callback = (error) ->) ->
    UrlCache._findAllUrlsInProject project_id, (error, urls) ->
      logger.log project_id: project_id, url_count: urls.length, "clearing project URLs"
      return callback(error) if error?
      jobs = for url in (urls or [])
        do (url) ->
          (callback) ->
            UrlCache._clearUrlFromCache project_id, url, (error) ->
              if error?
                logger.error err: error, project_id: project_id, url: url, "error clearing project URL"
              callback()
      async.series jobs, callback

  _ensureUrlIsInCache: (project_id, url, lastModified, callback = (error, pathOnDisk) ->) ->
    if lastModified?
      # MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      # So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    UrlCache._doesUrlNeedDownloading project_id, url, lastModified, (error, needsDownloading) =>
      return callback(error) if error?
      if needsDownloading
        logger.log url: url, lastModified: lastModified, "downloading URL"
        UrlFetcher.pipeUrlToFile url, UrlCache._cacheFilePathForUrl(project_id, url), (error) =>
          return callback(error) if error?
          UrlCache._updateOrCreateUrlDetails project_id, url, lastModified, (error) =>
            return callback(error) if error?
            callback null, UrlCache._cacheFilePathForUrl(project_id, url)
      else
        logger.log url: url, lastModified: lastModified, "URL is up to date in cache"
        callback null, UrlCache._cacheFilePathForUrl(project_id, url)

  _doesUrlNeedDownloading: (project_id, url, lastModified, callback = (error, needsDownloading) ->) ->
    if !lastModified?
      return callback null, true

    UrlCache._findUrlDetails project_id, url, (error, urlDetails) ->
      return callback(error) if error?
      if !urlDetails? or !urlDetails.lastModified? or urlDetails.lastModified.getTime() < lastModified.getTime()
        return callback null, true
      else
        return callback null, false

  _cacheFileNameForUrl: (project_id, url) ->
    project_id + ":" + crypto.createHash("md5").update(url).digest("hex")

  _cacheFilePathForUrl: (project_id, url) ->
    "#{Settings.path.clsiCacheDir}/#{UrlCache._cacheFileNameForUrl(project_id, url)}"

  _copyFile: (from, to, _callback = (error) ->) ->
    callbackOnce = (error) ->
      _callback(error)
      _callback = () ->
    writeStream = fs.createWriteStream(to)
    readStream = fs.createReadStream(from)
    writeStream.on "error", callbackOnce
    readStream.on "error", callbackOnce
    writeStream.on "close", () -> callbackOnce()
    readStream.pipe(writeStream)

  _clearUrlFromCache: (project_id, url, callback = (error) ->) ->
    UrlCache._clearUrlDetails project_id, url, (error) ->
      return callback(error) if error?
      UrlCache._deleteUrlCacheFromDisk project_id, url, (error) ->
        return callback(error) if error?
        callback null

  _deleteUrlCacheFromDisk: (project_id, url, callback = (error) ->) ->
    fs.unlink UrlCache._cacheFilePathForUrl(project_id, url), callback

  _findUrlDetails: (project_id, url, callback = (error, urlDetails) ->) ->
    db.UrlCache.find(where: { url: url, project_id: project_id })
      .success((urlDetails) -> callback null, urlDetails)
      .error callback

  _updateOrCreateUrlDetails: (project_id, url, lastModified, callback = (error) ->) ->
    db.UrlCache.findOrCreate(url: url, project_id: project_id)
      .success(
        (urlDetails) ->
          urlDetails.updateAttributes(lastModified: lastModified)
            .success(() -> callback())
            .error(callback)
      )
      .error callback

  _clearUrlDetails: (project_id, url, callback = (error) ->) ->
    db.UrlCache.destroy(url: url, project_id: project_id)
      .success(() -> callback null)
      .error callback

  _findAllUrlsInProject: (project_id, callback = (error, urls) ->) ->
    db.UrlCache.findAll(where: { project_id: project_id })
      .success(
        (urlEntries) ->
          callback null, urlEntries.map((entry) -> entry.url)
      )
      .error callback
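Note: the MySQL rounding step in _ensureUrlIsInCache, worked through with a concrete timestamp:

const lastModified = new Date(1400000000123) // incoming date with milliseconds
const rounded = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
// rounded.getTime() === 1400000000000, matching MySQL's one-second precision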
@@ -1,23 +0,0 @@
request = require("request").defaults(jar: false)
fs = require("fs")

module.exports = UrlFetcher =
  pipeUrlToFile: (url, filePath, _callback = (error) ->) ->
    callbackOnce = (error) ->
      _callback(error)
      _callback = () ->

    urlStream = request.get(url)
    fileStream = fs.createWriteStream(filePath)

    urlStream.on "response", (res) ->
      if res.statusCode >= 200 and res.statusCode < 300
        urlStream.pipe(fileStream)
      else
        callbackOnce(new Error("URL returned non-success status code: #{res.statusCode} #{url}"))

    urlStream.on "error", (error) ->
      callbackOnce(error or new Error("Something went wrong downloading the URL #{url}"))

    urlStream.on "end", () ->
      callbackOnce()
@@ -1,24 +0,0 @@
Sequelize = require("sequelize")
Settings = require("settings-sharelatex")

sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  Settings.mysql.clsi
)

module.exports =
  UrlCache: sequelize.define("UrlCache", {
    url: Sequelize.STRING
    project_id: Sequelize.STRING
    lastModified: Sequelize.DATE
  })

  Project: sequelize.define("Project", {
    project_id: Sequelize.STRING
    lastAccessed: Sequelize.DATE
  })

  sync: () -> sequelize.sync()
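Note: typical startup usage of the models defined above (old Sequelize 1.x style, matching the .success/.error calls used throughout these files):

const db = require('./db')
db.sync() // creates the UrlCache and Project tables if they do not exist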
20 app/js/CommandRunner.js Normal file
@@ -0,0 +1,20 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
 * decaffeinate suggestions:
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')

if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
  commandRunnerPath = './DockerRunner'
} else {
  commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)

module.exports = CommandRunner
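Note: a minimal settings sketch (hypothetical values) showing how the dockerRunner flag above selects the runner module at load time:

// with this settings fragment the require above resolves to './DockerRunner';
// with dockerRunner false or absent it falls back to './LocalCommandRunner'
const Settings = {
  clsi: {
    dockerRunner: true
  }
}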
255 app/js/CompileController.js Normal file
@@ -0,0 +1,255 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')

module.exports = CompileController = {
  compile(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const timer = new Metrics.Timer('compile-request')
    return RequestParser.parse(req.body, function (error, request) {
      if (error != null) {
        return next(error)
      }
      request.project_id = req.params.project_id
      if (req.params.user_id != null) {
        request.user_id = req.params.user_id
      }
      return ProjectPersistenceManager.markProjectAsJustAccessed(
        request.project_id,
        function (error) {
          if (error != null) {
            return next(error)
          }
          return CompileManager.doCompileWithLock(request, function (
            error,
            outputFiles
          ) {
            let code, status
            if (outputFiles == null) {
              outputFiles = []
            }
            if (error instanceof Errors.AlreadyCompilingError) {
              code = 423 // Http 423 Locked
              status = 'compile-in-progress'
            } else if (error instanceof Errors.FilesOutOfSyncError) {
              code = 409 // Http 409 Conflict
              status = 'retry'
            } else if (error && error.code === 'EPIPE') {
              // docker returns EPIPE when shutting down
              code = 503 // send 503 Unavailable response
              status = 'unavailable'
            } else if (error != null ? error.terminated : undefined) {
              status = 'terminated'
            } else if (error != null ? error.validate : undefined) {
              status = `validation-${error.validate}`
            } else if (error != null ? error.timedout : undefined) {
              status = 'timedout'
              logger.log(
                { err: error, project_id: request.project_id },
                'timeout running compile'
              )
            } else if (error != null) {
              status = 'error'
              code = 500
              logger.warn(
                { err: error, project_id: request.project_id },
                'error running compile'
              )
            } else {
              let file
              status = 'failure'
              for (file of Array.from(outputFiles)) {
                if (
                  file.path != null
                    ? file.path.match(/output\.pdf$/)
                    : undefined
                ) {
                  status = 'success'
                }
              }

              if (status === 'failure') {
                logger.warn(
                  { project_id: request.project_id, outputFiles },
                  'project failed to compile successfully, no output.pdf generated'
                )
              }

              // log an error if any core files are found
              for (file of Array.from(outputFiles)) {
                if (file.path === 'core') {
                  logger.error(
                    { project_id: request.project_id, req, outputFiles },
                    'core file found in output'
                  )
                }
              }
            }

            if (error != null) {
              outputFiles = error.outputFiles || []
            }

            timer.done()
            return res.status(code || 200).send({
              compile: {
                status,
                error: (error != null ? error.message : undefined) || error,
                outputFiles: outputFiles.map((file) => ({
                  url:
                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
                    (request.user_id != null
                      ? `/user/${request.user_id}`
                      : '') +
                    (file.build != null ? `/build/${file.build}` : '') +
                    `/output/${file.path}`,
                  path: file.path,
                  type: file.type,
                  build: file.build
                }))
              }
            })
          })
        }
      )
    })
  },

  stopCompile(req, res, next) {
    const { project_id, user_id } = req.params
    return CompileManager.stopCompile(project_id, user_id, function (error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  clearCache(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    return ProjectPersistenceManager.clearProject(
      req.params.project_id,
      req.params.user_id,
      function (error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  }, // No content

  syncFromCode(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const { file } = req.query
    const line = parseInt(req.query.line, 10)
    const column = parseInt(req.query.column, 10)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromCode(
      project_id,
      user_id,
      file,
      line,
      column,
      function (error, pdfPositions) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          pdf: pdfPositions
        })
      }
    )
  },

  syncFromPdf(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const page = parseInt(req.query.page, 10)
    const h = parseFloat(req.query.h)
    const v = parseFloat(req.query.v)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromPdf(
      project_id,
      user_id,
      page,
      h,
      v,
      function (error, codePositions) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          code: codePositions
        })
      }
    )
  },

  wordcount(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    const file = req.query.file || 'main.tex'
    const { project_id } = req.params
    const { user_id } = req.params
    const { image } = req.query
    if (
      image &&
      Settings.clsi &&
      Settings.clsi.docker &&
      Settings.clsi.docker.allowedImages &&
      !Settings.clsi.docker.allowedImages.includes(image)
    ) {
      return res.status(400).send('invalid image')
    }
    logger.log({ image, file, project_id }, 'word count request')

    return CompileManager.wordcount(project_id, user_id, file, image, function (
      error,
      result
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        texcount: result
      })
    })
  },

  status(req, res, next) {
    if (next == null) {
      next = function (error) {}
    }
    return res.send('OK')
  }
}
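Note: illustrative URLs produced by the outputFiles map above (host, ids and build values are made up):

// no user_id and no build id:
//   http://clsi.example.com/project/abc123/output/output.pdf
// per-user compile with a build id:
//   http://clsi.example.com/project/abc123/user/u42/build/17a0/output/output.pdf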
693 app/js/CompileManager.js Normal file
@@ -0,0 +1,693 @@
|
|||||||
|
/* eslint-disable
|
||||||
|
camelcase,
|
||||||
|
handle-callback-err,
|
||||||
|
no-return-assign,
|
||||||
|
no-undef,
|
||||||
|
no-unused-vars,
|
||||||
|
*/
|
||||||
|
// TODO: This file was created by bulk-decaffeinate.
|
||||||
|
// Fix any style issues and re-enable lint.
|
||||||
|
/*
|
||||||
|
* decaffeinate suggestions:
|
||||||
|
* DS101: Remove unnecessary use of Array.from
|
||||||
|
* DS102: Remove unnecessary code created because of implicit returns
|
||||||
|
* DS103: Rewrite code to no longer use __guard__
|
||||||
|
* DS207: Consider shorter variations of null checks
|
||||||
|
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||||
|
*/
|
||||||
|
let CompileManager
|
||||||
|
const ResourceWriter = require('./ResourceWriter')
|
||||||
|
const LatexRunner = require('./LatexRunner')
|
||||||
|
const OutputFileFinder = require('./OutputFileFinder')
|
||||||
|
const OutputCacheManager = require('./OutputCacheManager')
|
||||||
|
const Settings = require('settings-sharelatex')
|
||||||
|
const Path = require('path')
|
||||||
|
const logger = require('logger-sharelatex')
|
||||||
|
const Metrics = require('./Metrics')
|
||||||
|
const child_process = require('child_process')
|
||||||
|
const DraftModeManager = require('./DraftModeManager')
|
||||||
|
const TikzManager = require('./TikzManager')
|
||||||
|
const LockManager = require('./LockManager')
|
||||||
|
const fs = require('fs')
|
||||||
|
const fse = require('fs-extra')
|
||||||
|
const os = require('os')
|
||||||
|
const async = require('async')
|
||||||
|
const Errors = require('./Errors')
|
||||||
|
const CommandRunner = require('./CommandRunner')
|
||||||
|
|
||||||
|
const getCompileName = function (project_id, user_id) {
|
||||||
|
if (user_id != null) {
|
||||||
|
return `${project_id}-${user_id}`
|
||||||
|
} else {
|
||||||
|
return project_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const getCompileDir = (project_id, user_id) =>
|
||||||
|
Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
|
||||||
|
|
||||||
|
module.exports = CompileManager = {
|
||||||
|
doCompileWithLock(request, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, outputFiles) {}
|
||||||
|
}
|
||||||
|
const compileDir = getCompileDir(request.project_id, request.user_id)
|
||||||
|
const lockFile = Path.join(compileDir, '.project-lock')
|
||||||
|
// use a .project-lock file in the compile directory to prevent
|
||||||
|
// simultaneous compiles
|
||||||
|
return fse.ensureDir(compileDir, function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return LockManager.runWithLock(
|
||||||
|
lockFile,
|
||||||
|
(releaseLock) => CompileManager.doCompile(request, releaseLock),
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
doCompile(request, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, outputFiles) {}
|
||||||
|
}
|
||||||
|
const compileDir = getCompileDir(request.project_id, request.user_id)
|
||||||
|
let timer = new Metrics.Timer('write-to-disk')
|
||||||
|
logger.log(
|
||||||
|
{ project_id: request.project_id, user_id: request.user_id },
|
||||||
|
'syncing resources to disk'
|
||||||
|
)
|
||||||
|
return ResourceWriter.syncResourcesToDisk(request, compileDir, function (
|
||||||
|
error,
|
||||||
|
resourceList
|
||||||
|
) {
|
||||||
|
// NOTE: resourceList is insecure, it should only be used to exclude files from the output list
|
||||||
|
if (error != null && error instanceof Errors.FilesOutOfSyncError) {
|
||||||
|
logger.warn(
|
||||||
|
{ project_id: request.project_id, user_id: request.user_id },
|
||||||
|
'files out of sync, please retry'
|
||||||
|
)
|
||||||
|
return callback(error)
|
||||||
|
} else if (error != null) {
|
||||||
|
logger.err(
|
||||||
|
{
|
||||||
|
err: error,
|
||||||
|
project_id: request.project_id,
|
||||||
|
user_id: request.user_id
|
||||||
|
},
|
||||||
|
'error writing resources to disk'
|
||||||
|
)
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{
|
||||||
|
project_id: request.project_id,
|
||||||
|
user_id: request.user_id,
|
||||||
|
time_taken: Date.now() - timer.start
|
||||||
|
},
|
||||||
|
'written files to disk'
|
||||||
|
)
|
||||||
|
timer.done()
|
||||||
|
|
||||||
|
const injectDraftModeIfRequired = function (callback) {
|
||||||
|
if (request.draft) {
|
||||||
|
return DraftModeManager.injectDraftMode(
|
||||||
|
Path.join(compileDir, request.rootResourcePath),
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const createTikzFileIfRequired = (callback) =>
|
||||||
|
TikzManager.checkMainFile(
|
||||||
|
compileDir,
|
||||||
|
request.rootResourcePath,
|
||||||
|
resourceList,
|
||||||
|
function (error, needsMainFile) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
if (needsMainFile) {
|
||||||
|
return TikzManager.injectOutputFile(
|
||||||
|
compileDir,
|
||||||
|
request.rootResourcePath,
|
||||||
|
callback
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
// set up environment variables for chktex
|
||||||
|
const env = {}
|
||||||
|
if (Settings.texliveOpenoutAny && Settings.texliveOpenoutAny !== '') {
|
||||||
|
// override default texlive openout_any environment variable
|
||||||
|
env.openout_any = Settings.texliveOpenoutAny
|
||||||
|
}
|
||||||
|
// only run chktex on LaTeX files (not knitr .Rtex files or any others)
|
||||||
|
const isLaTeXFile =
|
||||||
|
request.rootResourcePath != null
|
||||||
|
? request.rootResourcePath.match(/\.tex$/i)
|
||||||
|
: undefined
|
||||||
|
if (request.check != null && isLaTeXFile) {
|
||||||
|
env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
|
||||||
|
env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
|
||||||
|
if (request.check === 'error') {
|
||||||
|
env.CHKTEX_EXIT_ON_ERROR = 1
|
||||||
|
}
|
||||||
|
if (request.check === 'validate') {
|
||||||
|
env.CHKTEX_VALIDATE = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// apply a series of file modifications/creations for draft mode and tikz
|
||||||
|
return async.series(
|
||||||
|
[injectDraftModeIfRequired, createTikzFileIfRequired],
|
||||||
|
function (error) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
timer = new Metrics.Timer('run-compile')
|
||||||
|
// find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
|
||||||
|
let tag =
|
||||||
|
__guard__(
|
||||||
|
__guard__(
|
||||||
|
request.imageName != null
|
||||||
|
? request.imageName.match(/:(.*)/)
|
||||||
|
: undefined,
|
||||||
|
(x1) => x1[1]
|
||||||
|
),
|
||||||
|
(x) => x.replace(/\./g, '-')
|
||||||
|
) || 'default'
|
||||||
|
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
|
||||||
|
tag = 'other'
|
||||||
|
} // exclude smoke test
|
||||||
|
Metrics.inc('compiles')
|
||||||
|
Metrics.inc(`compiles-with-image.${tag}`)
|
||||||
|
const compileName = getCompileName(
|
||||||
|
request.project_id,
|
||||||
|
request.user_id
|
||||||
|
)
|
||||||
|
return LatexRunner.runLatex(
|
||||||
|
compileName,
|
||||||
|
{
|
||||||
|
directory: compileDir,
|
||||||
|
mainFile: request.rootResourcePath,
|
||||||
|
compiler: request.compiler,
|
||||||
|
timeout: request.timeout,
|
||||||
|
image: request.imageName,
|
||||||
|
flags: request.flags,
|
||||||
|
environment: env,
|
||||||
|
compileGroup: request.compileGroup
|
||||||
|
},
|
||||||
|
function (error, output, stats, timings) {
|
||||||
|
// request was for validation only
|
||||||
|
let metric_key, metric_value
|
||||||
|
if (request.check === 'validate') {
|
||||||
|
const result = (error != null ? error.code : undefined)
|
||||||
|
? 'fail'
|
||||||
|
: 'pass'
|
||||||
|
error = new Error('validation')
|
||||||
|
error.validate = result
|
||||||
|
}
|
||||||
|
// request was for compile, and failed on validation
|
||||||
|
if (
|
||||||
|
request.check === 'error' &&
|
||||||
|
(error != null ? error.message : undefined) === 'exited'
|
||||||
|
) {
|
||||||
|
error = new Error('compilation')
|
||||||
|
error.validate = 'fail'
|
||||||
|
}
|
||||||
|
// compile was killed by user, was a validation, or a compile which failed validation
|
||||||
|
if (
|
||||||
|
(error != null ? error.terminated : undefined) ||
|
||||||
|
(error != null ? error.validate : undefined) ||
|
||||||
|
(error != null ? error.timedout : undefined)
|
||||||
|
) {
|
||||||
|
OutputFileFinder.findOutputFiles(
|
||||||
|
resourceList,
|
||||||
|
compileDir,
|
||||||
|
function (err, outputFiles) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
error.outputFiles = outputFiles // return output files so user can check logs
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// compile completed normally
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
Metrics.inc('compiles-succeeded')
|
||||||
|
const object = stats || {}
|
||||||
|
for (metric_key in object) {
|
||||||
|
metric_value = object[metric_key]
|
||||||
|
Metrics.count(metric_key, metric_value)
|
||||||
|
}
|
||||||
|
const object1 = timings || {}
|
||||||
|
for (metric_key in object1) {
|
||||||
|
metric_value = object1[metric_key]
|
||||||
|
Metrics.timing(metric_key, metric_value)
|
||||||
|
}
|
||||||
|
const loadavg =
|
||||||
|
typeof os.loadavg === 'function' ? os.loadavg() : undefined
|
||||||
|
if (loadavg != null) {
|
||||||
|
Metrics.gauge('load-avg', loadavg[0])
|
||||||
|
}
|
||||||
|
const ts = timer.done()
|
||||||
|
logger.log(
|
||||||
|
{
|
||||||
|
project_id: request.project_id,
|
||||||
|
user_id: request.user_id,
|
||||||
|
time_taken: ts,
|
||||||
|
stats,
|
||||||
|
timings,
|
||||||
|
loadavg
|
||||||
|
},
|
||||||
|
'done compile'
|
||||||
|
)
|
||||||
|
if ((stats != null ? stats['latex-runs'] : undefined) > 0) {
|
||||||
|
Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
(stats != null ? stats['latex-runs'] : undefined) > 0 &&
|
||||||
|
(timings != null ? timings['cpu-time'] : undefined) > 0
|
||||||
|
) {
|
||||||
|
Metrics.timing(
|
||||||
|
'run-compile-cpu-time-per-pass',
|
||||||
|
timings['cpu-time'] / stats['latex-runs']
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return OutputFileFinder.findOutputFiles(
|
||||||
|
resourceList,
|
||||||
|
compileDir,
|
||||||
|
function (error, outputFiles) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return OutputCacheManager.saveOutputFiles(
|
||||||
|
outputFiles,
|
||||||
|
compileDir,
|
||||||
|
(error, newOutputFiles) => callback(null, newOutputFiles)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
stopCompile(project_id, user_id, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
const compileName = getCompileName(project_id, user_id)
|
||||||
|
return LatexRunner.killLatex(compileName, callback)
|
||||||
|
},
|
||||||
|
|
||||||
|
clearProject(project_id, user_id, _callback) {
|
||||||
|
if (_callback == null) {
|
||||||
|
_callback = function (error) {}
|
||||||
|
}
|
||||||
|
const callback = function (error) {
|
||||||
|
_callback(error)
|
||||||
|
return (_callback = function () {})
|
||||||
|
}
|
||||||
|
|
||||||
|
const compileDir = getCompileDir(project_id, user_id)
|
||||||
|
|
||||||
|
return CompileManager._checkDirectory(compileDir, function (err, exists) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
if (!exists) {
|
||||||
|
return callback()
|
||||||
|
} // skip removal if no directory present
|
||||||
|
|
||||||
|
const proc = child_process.spawn('rm', ['-r', compileDir])
|
||||||
|
|
||||||
|
proc.on('error', callback)
|
||||||
|
|
||||||
|
let stderr = ''
|
||||||
|
proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))
|
||||||
|
|
||||||
|
return proc.on('close', function (code) {
|
||||||
|
if (code === 0) {
|
||||||
|
return callback(null)
|
||||||
|
} else {
|
||||||
|
return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_findAllDirs(callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, allDirs) {}
|
||||||
|
}
|
||||||
|
const root = Settings.path.compilesDir
|
||||||
|
return fs.readdir(root, function (err, files) {
|
||||||
|
if (err != null) {
|
||||||
|
return callback(err)
|
||||||
|
}
|
||||||
|
const allDirs = Array.from(files).map((file) => Path.join(root, file))
|
||||||
|
return callback(null, allDirs)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
clearExpiredProjects(max_cache_age_ms, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
const now = Date.now()
|
||||||
|
// action for each directory
|
||||||
|
const expireIfNeeded = (checkDir, cb) =>
|
||||||
|
fs.stat(checkDir, function (err, stats) {
|
||||||
|
if (err != null) {
|
||||||
|
return cb()
|
||||||
|
} // ignore errors checking directory
|
||||||
|
const age = now - stats.mtime
|
||||||
|
const hasExpired = age > max_cache_age_ms
|
||||||
|
if (hasExpired) {
|
||||||
|
return fse.remove(checkDir, cb)
|
||||||
|
} else {
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
// iterate over all project directories
|
||||||
|
return CompileManager._findAllDirs(function (error, allDirs) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback()
|
||||||
|
}
|
||||||
|
return async.eachSeries(allDirs, expireIfNeeded, callback)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkDirectory(compileDir, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, exists) {}
|
||||||
|
}
|
||||||
|
return fs.lstat(compileDir, function (err, stats) {
|
||||||
|
if ((err != null ? err.code : undefined) === 'ENOENT') {
|
||||||
|
return callback(null, false) // directory does not exist
|
||||||
|
} else if (err != null) {
|
||||||
|
logger.err(
|
||||||
|
{ dir: compileDir, err },
|
||||||
|
'error on stat of project directory for removal'
|
||||||
|
)
|
||||||
|
return callback(err)
|
||||||
|
} else if (!(stats != null ? stats.isDirectory() : undefined)) {
|
||||||
|
logger.err(
|
||||||
|
{ dir: compileDir, stats },
|
||||||
|
'bad project directory for removal'
|
||||||
|
)
|
||||||
|
return callback(new Error('project directory is not directory'))
|
||||||
|
} else {
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}, // directory exists
|
||||||
|
|
||||||
|
syncFromCode(project_id, user_id, file_name, line, column, callback) {
|
||||||
|
// If LaTeX was run in a virtual environment, the file path that synctex expects
|
||||||
|
// might not match the file path on the host. The .synctex.gz file however, will be accessed
|
||||||
|
// wherever it is on the host.
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, pdfPositions) {}
|
||||||
|
}
|
||||||
|
const compileName = getCompileName(project_id, user_id)
|
||||||
|
const base_dir = Settings.path.synctexBaseDir(compileName)
|
||||||
|
const file_path = base_dir + '/' + file_name
|
||||||
|
const compileDir = getCompileDir(project_id, user_id)
|
||||||
|
const synctex_path = `${base_dir}/output.pdf`
|
||||||
|
const command = ['code', synctex_path, file_path, line, column]
|
||||||
|
CompileManager._runSynctex(project_id, user_id, command, function (
|
||||||
|
error,
|
||||||
|
stdout
|
||||||
|
) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ project_id, user_id, file_name, line, column, command, stdout },
|
||||||
|
'synctex code output'
|
||||||
|
)
|
||||||
|
return callback(null, CompileManager._parseSynctexFromCodeOutput(stdout))
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
syncFromPdf(project_id, user_id, page, h, v, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error, filePositions) {}
|
||||||
|
}
|
||||||
|
const compileName = getCompileName(project_id, user_id)
|
||||||
|
const compileDir = getCompileDir(project_id, user_id)
|
||||||
|
const base_dir = Settings.path.synctexBaseDir(compileName)
|
||||||
|
const synctex_path = `${base_dir}/output.pdf`
|
||||||
|
const command = ['pdf', synctex_path, page, h, v]
|
||||||
|
CompileManager._runSynctex(project_id, user_id, command, function (
|
||||||
|
error,
|
||||||
|
stdout
|
||||||
|
) {
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
logger.log(
|
||||||
|
{ project_id, user_id, page, h, v, stdout },
|
||||||
|
'synctex pdf output'
|
||||||
|
)
|
||||||
|
return callback(
|
||||||
|
null,
|
||||||
|
CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkFileExists(dir, filename, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (error) {}
|
||||||
|
}
|
||||||
|
const file = Path.join(dir, filename)
|
||||||
|
return fs.stat(dir, function (error, stats) {
|
||||||
|
if ((error != null ? error.code : undefined) === 'ENOENT') {
|
||||||
|
return callback(new Errors.NotFoundError('no output directory'))
|
||||||
|
}
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
return fs.stat(file, function (error, stats) {
|
||||||
|
if ((error != null ? error.code : undefined) === 'ENOENT') {
|
||||||
|
return callback(new Errors.NotFoundError('no output file'))
|
||||||
|
}
|
||||||
|
if (error != null) {
|
||||||
|
return callback(error)
|
||||||
|
}
|
||||||
|
if (!(stats != null ? stats.isFile() : undefined)) {
|
||||||
|
return callback(new Error('not a file'))
|
||||||
|
}
|
||||||
|
return callback()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_runSynctex(project_id, user_id, command, callback) {
|
||||||
|
    if (callback == null) {
      callback = function (error, stdout) {}
    }
    const seconds = 1000

    command.unshift('/opt/synctex')

    const directory = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000 // increased to allow for large projects
    const compileName = getCompileName(project_id, user_id)
    const compileGroup = 'synctex'
    CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
      if (error) {
        return callback(error)
      }
      return CommandRunner.run(
        compileName,
        command,
        directory,
        Settings.clsi && Settings.clsi.docker
          ? Settings.clsi.docker.image
          : undefined,
        timeout,
        {},
        compileGroup,
        function (error, output) {
          if (error != null) {
            logger.err(
              { err: error, command, project_id, user_id },
              'error running synctex'
            )
            return callback(error)
          }
          return callback(null, output.stdout)
        }
      )
    })
  },

  _parseSynctexFromCodeOutput(output) {
    const results = []
    for (const line of Array.from(output.split('\n'))) {
      const [node, page, h, v, width, height] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        results.push({
          page: parseInt(page, 10),
          h: parseFloat(h),
          v: parseFloat(v),
          height: parseFloat(height),
          width: parseFloat(width)
        })
      }
    }
    return results
  },

  _parseSynctexFromPdfOutput(output, base_dir) {
    const results = []
    for (let line of Array.from(output.split('\n'))) {
      let column, file_path, node
      ;[node, file_path, line, column] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        const file = file_path.slice(base_dir.length + 1)
        results.push({
          file,
          line: parseInt(line, 10),
          column: parseInt(column, 10)
        })
      }
    }
    return results
  },

  wordcount(project_id, user_id, file_name, image, callback) {
    if (callback == null) {
      callback = function (error, results) {}
    }
    logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
    const file_path = `$COMPILE_DIR/${file_name}`
    const command = [
      'texcount',
      '-nocol',
      '-inc',
      file_path,
      `-out=${file_path}.wc`
    ]
    const compileDir = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000
    const compileName = getCompileName(project_id, user_id)
    const compileGroup = 'wordcount'
    return fse.ensureDir(compileDir, function (error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for wordcount'
        )
        return callback(error)
      }
      return CommandRunner.run(
        compileName,
        command,
        compileDir,
        image,
        timeout,
        {},
        compileGroup,
        function (error) {
          if (error != null) {
            return callback(error)
          }
          return fs.readFile(
            compileDir + '/' + file_name + '.wc',
            'utf-8',
            function (err, stdout) {
              if (err != null) {
                // log under node_err so Sentry groups these by message rather than by the random path inside the error
                logger.err(
                  { node_err: err, command, compileDir, project_id, user_id },
                  'error reading word count output'
                )
                return callback(err)
              }
              const results = CompileManager._parseWordcountFromOutput(stdout)
              logger.log(
                { project_id, user_id, wordcount: results },
                'word count results'
              )
              return callback(null, results)
            }
          )
        }
      )
    })
  },

  _parseWordcountFromOutput(output) {
    const results = {
      encode: '',
      textWords: 0,
      headWords: 0,
      outside: 0,
      headers: 0,
      elements: 0,
      mathInline: 0,
      mathDisplay: 0,
      errors: 0,
      messages: ''
    }
    for (const line of Array.from(output.split('\n'))) {
      const [data, info] = Array.from(line.split(':'))
      if (data.indexOf('Encoding') > -1) {
        results.encode = info.trim()
      }
      if (data.indexOf('in text') > -1) {
        results.textWords = parseInt(info, 10)
      }
      if (data.indexOf('in head') > -1) {
        results.headWords = parseInt(info, 10)
      }
      if (data.indexOf('outside') > -1) {
        results.outside = parseInt(info, 10)
      }
      if (data.indexOf('of head') > -1) {
        results.headers = parseInt(info, 10)
      }
      if (data.indexOf('Number of floats/tables/figures') > -1) {
        results.elements = parseInt(info, 10)
      }
      if (data.indexOf('Number of math inlines') > -1) {
        results.mathInline = parseInt(info, 10)
      }
      if (data.indexOf('Number of math displayed') > -1) {
        results.mathDisplay = parseInt(info, 10)
      }
      if (data === '(errors') {
        // errors are reported as (errors:123)
        results.errors = parseInt(info, 10)
      }
      if (line.indexOf('!!! ') > -1) {
        // errors are logged as !!! message !!!
        results.messages += line + '\n'
      }
    }
    return results
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
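For reviewers unfamiliar with texcount's report format, a minimal sketch of how _parseWordcountFromOutput consumes it; the sample report and require path are illustrative, and assume the service's dependencies are installed:

    const CompileManager = require('./app/js/CompileManager') // path assumed

    const sampleReport = [
      'Encoding: utf8',
      'Words in text: 1234',
      'Words in headers: 56',
      'Number of math inlines: 7'
    ].join('\n')

    console.log(CompileManager._parseWordcountFromOutput(sampleReport))
    // => { encode: 'utf8', textWords: 1234, headWords: 56, mathInline: 7, ... }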
38  app/js/ContentTypeMapper.js  Normal file
@@ -0,0 +1,38 @@
/* eslint-disable
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')

// here we coerce html, css and js to text/plain,
// otherwise choose the correct mime type based on the file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
  map(path) {
    switch (Path.extname(path)) {
      case '.txt':
      case '.html':
      case '.js':
      case '.css':
      case '.svg':
        return 'text/plain'
      case '.csv':
        return 'text/csv'
      case '.pdf':
        return 'application/pdf'
      case '.png':
        return 'image/png'
      case '.jpg':
      case '.jpeg':
        return 'image/jpeg'
      case '.tiff':
        return 'image/tiff'
      case '.gif':
        return 'image/gif'
      default:
        return 'application/octet-stream'
    }
  }
}
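A quick, hedged illustration of the mapper's behaviour (file names are illustrative); note the extension match is case-sensitive, so upper-case extensions fall through to octet-stream:

    const ContentTypeMapper = require('./app/js/ContentTypeMapper') // path assumed

    console.log(ContentTypeMapper.map('output.pdf')) // 'application/pdf'
    console.log(ContentTypeMapper.map('main.js'))    // 'text/plain' (coerced, see comment above)
    console.log(ContentTypeMapper.map('figure.PNG')) // 'application/octet-stream' (case-sensitive match)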
18  app/js/DbQueue.js  Normal file
@@ -0,0 +1,18 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const queue = async.queue(
  (task, cb) => task(cb),
  Settings.parallelSqlQueryLimit
)

queue.drain = () => logger.debug('all items have been processed')

module.exports = { queue }
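A minimal sketch of the intended call pattern (the task body is a stand-in for a real SQL call; Settings.parallelSqlQueryLimit is assumed to be configured):

    const dbQueue = require('./app/js/DbQueue') // path assumed

    // each task receives the queue's callback and must call it when done
    dbQueue.queue.push((cb) => {
      // stand-in for a real database query
      setTimeout(() => cb(null), 10)
    })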
113  app/js/DockerLockManager.js  Normal file
@@ -0,0 +1,113 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const logger = require('logger-sharelatex')

const LockState = {} // locks for docker container operations, by container name

module.exports = LockManager = {
  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000, // retry time

  tryLock(key, callback) {
    let lockValue
    if (callback == null) {
      callback = function (err, gotLock) {}
    }
    const existingLock = LockState[key]
    if (existingLock != null) {
      // the lock is already taken, check how old it is
      const lockAge = Date.now() - existingLock.created
      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
        return callback(null, false) // we didn't get the lock, bail out
      } else {
        logger.error(
          { key, lock: existingLock, age: lockAge },
          'taking old lock by force'
        )
      }
    }
    // take the lock
    LockState[key] = lockValue = { created: Date.now() }
    return callback(null, true, lockValue)
  },

  getLock(key, callback) {
    let attempt
    if (callback == null) {
      callback = function (error, lockValue) {}
    }
    const startTime = Date.now()
    return (attempt = () =>
      LockManager.tryLock(key, function (error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        if (gotLock) {
          return callback(null, lockValue)
        } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
          const e = new Error('Lock timeout')
          e.key = key
          return callback(e)
        } else {
          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
        }
      }))()
  },

  releaseLock(key, lockValue, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const existingLock = LockState[key]
    if (existingLock === lockValue) {
      // lockValue is an object, so we can test by reference
      delete LockState[key] // our lock, so we can free it
      return callback()
    } else if (existingLock != null) {
      // lock exists but doesn't match ours
      logger.error(
        { key, lock: existingLock },
        'tried to release lock taken by force'
      )
      return callback()
    } else {
      logger.error(
        { key, lock: existingLock },
        'tried to release lock that has gone'
      )
      return callback()
    }
  },

  runWithLock(key, runner, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return LockManager.getLock(key, function (error, lockValue) {
      if (error != null) {
        return callback(error)
      }
      return runner((error1, ...args) =>
        LockManager.releaseLock(key, lockValue, function (error2) {
          error = error1 || error2
          if (error != null) {
            return callback(error)
          }
          return callback(null, ...Array.from(args))
        })
      )
    })
  }
}
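A minimal sketch of the runner/release pattern this module expects (the container name is illustrative): runWithLock serialises operations on one container, and any extra arguments passed to the release function are forwarded to the final callback:

    const LockManager = require('./app/js/DockerLockManager') // path assumed

    LockManager.runWithLock('project-abc-123', (releaseLock) => {
      // ... perform a docker operation on the container here ...
      releaseLock(null, 'done')
    }, (error, result) => {
      if (error) { throw error }
      console.log(result) // 'done'
    })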
619  app/js/DockerRunner.js  Normal file
@@ -0,0 +1,619 @@
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Docker = require('dockerode')
const dockerode = new Docker()
const crypto = require('crypto')
const async = require('async')
const LockManager = require('./DockerLockManager')
const fs = require('fs')
const Path = require('path')
const _ = require('lodash')

const ONE_HOUR_IN_MS = 60 * 60 * 1000
logger.info('using docker runner')

function usingSiblingContainers() {
  return (
    Settings != null &&
    Settings.path != null &&
    Settings.path.sandboxedCompilesHostDir != null
  )
}

let containerMonitorTimeout
let containerMonitorInterval

const DockerRunner = {
  run(
    projectId,
    command,
    directory,
    image,
    timeout,
    environment,
    compileGroup,
    callback
  ) {
    if (usingSiblingContainers()) {
      const _newPath = Settings.path.sandboxedCompilesHostDir
      logger.log(
        { path: _newPath },
        'altering bind path for sibling containers'
      )
      // Server Pro, example:
      // '/var/lib/sharelatex/data/compiles/<project-id>'
      // ... becomes ...
      // '/opt/sharelatex_data/data/compiles/<project-id>'
      directory = Path.join(
        Settings.path.sandboxedCompilesHostDir,
        Path.basename(directory)
      )
    }

    const volumes = { [directory]: '/compile' }

    command = command.map((arg) =>
      arg.toString().replace('$COMPILE_DIR', '/compile')
    )
    if (image == null) {
      image = Settings.clsi.docker.image
    }

    if (
      Settings.clsi.docker.allowedImages &&
      !Settings.clsi.docker.allowedImages.includes(image)
    ) {
      return callback(new Error('image not allowed'))
    }

    if (Settings.texliveImageNameOveride != null) {
      const img = image.split('/')
      image = `${Settings.texliveImageNameOveride}/${img[2]}`
    }

    const options = DockerRunner._getContainerOptions(
      command,
      image,
      volumes,
      timeout,
      environment,
      compileGroup
    )
    const fingerprint = DockerRunner._fingerprintContainer(options)
    const name = `project-${projectId}-${fingerprint}`
    options.name = name

    // logOptions = _.clone(options)
    // logOptions?.HostConfig?.SecurityOpt = "seccomp used, removed in logging"
    logger.log({ projectId }, 'running docker container')
    DockerRunner._runAndWaitForContainer(
      options,
      volumes,
      timeout,
      (error, output) => {
        if (error && error.statusCode === 500) {
          logger.log(
            { err: error, projectId },
            'error running container so destroying and retrying'
          )
          DockerRunner.destroyContainer(name, null, true, (error) => {
            if (error != null) {
              return callback(error)
            }
            DockerRunner._runAndWaitForContainer(
              options,
              volumes,
              timeout,
              callback
            )
          })
        } else {
          callback(error, output)
        }
      }
    )

    // pass back the container name to allow it to be killed
    return name
  },

  kill(containerId, callback) {
    logger.log({ containerId }, 'sending kill signal to container')
    const container = dockerode.getContainer(containerId)
    container.kill((error) => {
      if (
        error != null &&
        error.message != null &&
        error.message.match(/Cannot kill container .* is not running/)
      ) {
        logger.warn(
          { err: error, containerId },
          'container not running, continuing'
        )
        error = null
      }
      if (error != null) {
        logger.error({ err: error, containerId }, 'error killing container')
        callback(error)
      } else {
        callback()
      }
    })
  },

  _runAndWaitForContainer(options, volumes, timeout, _callback) {
    const callback = _.once(_callback)
    const { name } = options

    let streamEnded = false
    let containerReturned = false
    let output = {}

    function callbackIfFinished() {
      if (streamEnded && containerReturned) {
        callback(null, output)
      }
    }

    function attachStreamHandler(error, _output) {
      if (error != null) {
        return callback(error)
      }
      output = _output
      streamEnded = true
      callbackIfFinished()
    }

    DockerRunner.startContainer(
      options,
      volumes,
      attachStreamHandler,
      (error, containerId) => {
        if (error != null) {
          return callback(error)
        }

        DockerRunner.waitForContainer(name, timeout, (error, exitCode) => {
          if (error != null) {
            return callback(error)
          }
          if (exitCode === 137) {
            // exit status from kill -9
            const err = new Error('terminated')
            err.terminated = true
            return callback(err)
          }
          if (exitCode === 1) {
            // exit status from chktex
            const err = new Error('exited')
            err.code = exitCode
            return callback(err)
          }
          containerReturned = true
          if (options != null && options.HostConfig != null) {
            options.HostConfig.SecurityOpt = null
          }
          logger.log({ exitCode, options }, 'docker container has exited')
          callbackIfFinished()
        })
      }
    )
  },

  _getContainerOptions(
    command,
    image,
    volumes,
    timeout,
    environment,
    compileGroup
  ) {
    const timeoutInSeconds = timeout / 1000

    const dockerVolumes = {}
    for (const hostVol in volumes) {
      const dockerVol = volumes[hostVol]
      dockerVolumes[dockerVol] = {}

      if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
        volumes[hostVol] = `${dockerVol}:rw`
      }
    }

    // merge settings and environment parameter
    const env = {}
    for (const src of [Settings.clsi.docker.env, environment || {}]) {
      for (const key in src) {
        const value = src[key]
        env[key] = value
      }
    }
    // set the path based on the image year
    const match = image.match(/:([0-9]+)\.[0-9]+/)
    const year = match ? match[1] : '2014'
    env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
    const options = {
      Cmd: command,
      Image: image,
      Volumes: dockerVolumes,
      WorkingDir: '/compile',
      NetworkDisabled: true,
      Memory: 1024 * 1024 * 1024, // 1 GB
      User: Settings.clsi.docker.user,
      Env: Object.entries(env).map(([key, value]) => `${key}=${value}`),
      HostConfig: {
        Binds: Object.entries(volumes).map(
          ([hostVol, dockerVol]) => `${hostVol}:${dockerVol}`
        ),
        LogConfig: { Type: 'none', Config: {} },
        Ulimits: [
          {
            Name: 'cpu',
            Soft: timeoutInSeconds + 5,
            Hard: timeoutInSeconds + 10
          }
        ],
        CapDrop: 'ALL',
        SecurityOpt: ['no-new-privileges']
      }
    }

    if (Settings.path != null && Settings.path.synctexBinHostPath != null) {
      options.HostConfig.Binds.push(
        `${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
      )
    }

    if (Settings.clsi.docker.seccomp_profile != null) {
      options.HostConfig.SecurityOpt.push(
        `seccomp=${Settings.clsi.docker.seccomp_profile}`
      )
    }

    if (Settings.clsi.docker.runtime) {
      options.HostConfig.Runtime = Settings.clsi.docker.runtime
    }

    if (Settings.clsi.docker.Readonly) {
      options.HostConfig.ReadonlyRootfs = true
      options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
      options.Volumes['/home/tex'] = {}
    }

    // Allow per-compile-group overriding of individual settings
    if (
      Settings.clsi.docker.compileGroupConfig &&
      Settings.clsi.docker.compileGroupConfig[compileGroup]
    ) {
      const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
      for (const key in override) {
        _.set(options, key, override[key])
      }
    }

    return options
  },

  _fingerprintContainer(containerOptions) {
    // Yay, Hashing!
    const json = JSON.stringify(containerOptions)
    return crypto.createHash('md5').update(json).digest('hex')
  },

  startContainer(options, volumes, attachStreamHandler, callback) {
    LockManager.runWithLock(
      options.name,
      (releaseLock) =>
        // Check that volumes exist before starting the container.
        // When a container is started with a volume pointing to a
        // non-existent directory then docker creates the directory but
        // with root ownership.
        DockerRunner._checkVolumes(options, volumes, (err) => {
          if (err != null) {
            return releaseLock(err)
          }
          DockerRunner._startContainer(
            options,
            volumes,
            attachStreamHandler,
            releaseLock
          )
        }),
      callback
    )
  },

  // Check that volumes exist and are directories
  _checkVolumes(options, volumes, callback) {
    if (usingSiblingContainers()) {
      // Server Pro, with sibling-containers active, skip checks
      return callback(null)
    }

    const checkVolume = (path, cb) =>
      fs.stat(path, (err, stats) => {
        if (err != null) {
          return cb(err)
        }
        if (!stats.isDirectory()) {
          return cb(new Error('not a directory'))
        }
        cb()
      })
    const jobs = []
    for (const vol in volumes) {
      jobs.push((cb) => checkVolume(vol, cb))
    }
    async.series(jobs, callback)
  },

  _startContainer(options, volumes, attachStreamHandler, callback) {
    callback = _.once(callback)
    const { name } = options

    logger.log({ container_name: name }, 'starting container')
    const container = dockerode.getContainer(name)

    function createAndStartContainer() {
      dockerode.createContainer(options, (error, container) => {
        if (error != null) {
          return callback(error)
        }
        startExistingContainer()
      })
    }

    function startExistingContainer() {
      DockerRunner.attachToContainer(
        options.name,
        attachStreamHandler,
        (error) => {
          if (error != null) {
            return callback(error)
          }
          container.start((error) => {
            if (error != null && error.statusCode !== 304) {
              callback(error)
            } else {
              // already running
              callback()
            }
          })
        }
      )
    }

    container.inspect((error, stats) => {
      if (error != null && error.statusCode === 404) {
        createAndStartContainer()
      } else if (error != null) {
        logger.err(
          { container_name: name, error },
          'unable to inspect container to start'
        )
        callback(error)
      } else {
        startExistingContainer()
      }
    })
  },

  attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
    const container = dockerode.getContainer(containerId)
    container.attach({ stdout: 1, stderr: 1, stream: 1 }, (error, stream) => {
      if (error != null) {
        logger.error(
          { err: error, containerId },
          'error attaching to container'
        )
        return attachStartCallback(error)
      } else {
        attachStartCallback()
      }

      logger.log({ containerId }, 'attached to container')

      const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
      function createStringOutputStream(name) {
        return {
          data: '',
          overflowed: false,
          write(data) {
            if (this.overflowed) {
              return
            }
            if (this.data.length < MAX_OUTPUT) {
              this.data += data
            } else {
              logger.error(
                {
                  containerId,
                  length: this.data.length,
                  maxLen: MAX_OUTPUT
                },
                `${name} exceeds max size`
              )
              this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
              this.overflowed = true
            }
          }
          // kill container if too much output
          // docker.containers.kill(containerId, () ->)
        }
      }

      const stdout = createStringOutputStream('stdout')
      const stderr = createStringOutputStream('stderr')

      container.modem.demuxStream(stream, stdout, stderr)

      stream.on('error', (err) =>
        logger.error(
          { err, containerId },
          'error reading from container stream'
        )
      )

      stream.on('end', () =>
        attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
      )
    })
  },

  waitForContainer(containerId, timeout, _callback) {
    const callback = _.once(_callback)

    const container = dockerode.getContainer(containerId)

    let timedOut = false
    const timeoutId = setTimeout(() => {
      timedOut = true
      logger.log({ containerId }, 'timeout reached, killing container')
      container.kill((err) => {
        if (err != null) {
          logger.warn({ err, containerId }, 'failed to kill container')
        }
      })
    }, timeout)

    logger.log({ containerId }, 'waiting for docker container')
    container.wait((error, res) => {
      if (error != null) {
        clearTimeout(timeoutId)
        logger.error({ err: error, containerId }, 'error waiting for container')
        return callback(error)
      }
      if (timedOut) {
        logger.log({ containerId }, 'docker container timed out')
        error = new Error('container timed out')
        error.timedout = true
        callback(error)
      } else {
        clearTimeout(timeoutId)
        logger.log(
          { containerId, exitCode: res.StatusCode },
          'docker container returned'
        )
        callback(null, res.StatusCode)
      }
    })
  },

  destroyContainer(containerName, containerId, shouldForce, callback) {
    // We want the containerName for the lock and, ideally, the
    // containerId to delete. There is a bug in the docker.io module
    // where if you delete by name and there is an error, it throws an
    // async exception, but if you delete by id it just does a normal
    // error callback. We fall back to deleting by name if no id is
    // supplied.
    LockManager.runWithLock(
      containerName,
      (releaseLock) =>
        DockerRunner._destroyContainer(
          containerId || containerName,
          shouldForce,
          releaseLock
        ),
      callback
    )
  },

  _destroyContainer(containerId, shouldForce, callback) {
    logger.log({ containerId }, 'destroying docker container')
    const container = dockerode.getContainer(containerId)
    container.remove({ force: shouldForce === true, v: true }, (error) => {
      if (error != null && error.statusCode === 404) {
        logger.warn(
          { err: error, containerId },
          'container not found, continuing'
        )
        error = null
      }
      if (error != null) {
        logger.error({ err: error, containerId }, 'error destroying container')
      } else {
        logger.log({ containerId }, 'destroyed container')
      }
      callback(error)
    })
  },

  // handle expiry of docker containers

  MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || ONE_HOUR_IN_MS,

  examineOldContainer(container, callback) {
    const name = container.Name || (container.Names && container.Names[0])
    const created = container.Created * 1000 // creation time is returned in seconds
    const now = Date.now()
    const age = now - created
    const maxAge = DockerRunner.MAX_CONTAINER_AGE
    const ttl = maxAge - age
    logger.log(
      { containerName: name, created, now, age, maxAge, ttl },
      'checking whether to destroy container'
    )
    return { name, id: container.Id, ttl }
  },

  destroyOldContainers(callback) {
    dockerode.listContainers({ all: true }, (error, containers) => {
      if (error != null) {
        return callback(error)
      }
      const jobs = []
      for (const container of containers) {
        const { name, id, ttl } = DockerRunner.examineOldContainer(container)
        if (name.slice(0, 9) === '/project-' && ttl <= 0) {
          // strip the / prefix
          // the LockManager uses the plain container name
          const plainName = name.slice(1)
          jobs.push((cb) =>
            DockerRunner.destroyContainer(plainName, id, false, () => cb())
          )
        }
      }
      // Ignore errors because some containers get stuck but
      // will be destroyed next time
      async.series(jobs, callback)
    })
  },

  startContainerMonitor() {
    logger.log(
      { maxAge: DockerRunner.MAX_CONTAINER_AGE },
      'starting container expiry'
    )

    // guarantee only one monitor is running
    DockerRunner.stopContainerMonitor()

    // randomise the start time
    const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
    containerMonitorTimeout = setTimeout(() => {
      containerMonitorInterval = setInterval(
        () =>
          DockerRunner.destroyOldContainers((err) => {
            if (err) {
              logger.error({ err }, 'failed to destroy old containers')
            }
          }),
        ONE_HOUR_IN_MS
      )
    }, randomDelay)
  },

  stopContainerMonitor() {
    if (containerMonitorTimeout) {
      clearTimeout(containerMonitorTimeout)
      containerMonitorTimeout = undefined
    }
    if (containerMonitorInterval) {
      clearInterval(containerMonitorInterval)
      containerMonitorInterval = undefined
    }
  }
}

DockerRunner.startContainerMonitor()

module.exports = DockerRunner
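A hedged sketch of how a caller drives this runner; the project id, directory and command are illustrative, and Settings.clsi.docker.image must be configured for the image fallback to work:

    const DockerRunner = require('./app/js/DockerRunner') // path assumed

    const containerName = DockerRunner.run(
      'project-id',                        // projectId (illustrative)
      ['latexmk', '-pdf', '$COMPILE_DIR/main.tex'],
      '/var/lib/clsi/compiles/project-id', // host compile directory (illustrative)
      undefined,                           // image: falls back to Settings.clsi.docker.image
      60 * 1000,                           // timeout in ms
      {},                                  // extra environment
      'standard',                          // compileGroup
      (error, output) => {
        if (error) { return console.error(error) }
        console.log(output.stdout)
      }
    )
    // containerName can later be passed to DockerRunner.kill(containerName, cb)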
57  app/js/DraftModeManager.js  Normal file
@@ -0,0 +1,57 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = DraftModeManager = {
  injectDraftMode(filename, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return fs.readFile(filename, 'utf8', function (error, content) {
      if (error != null) {
        return callback(error)
      }
      // avoid adding draft mode more than once
      if (
        (content != null
          ? content.indexOf('\\documentclass[draft')
          : undefined) >= 0
      ) {
        return callback()
      }
      const modified_content = DraftModeManager._injectDraftOption(content)
      logger.log(
        {
          content: content.slice(0, 1024), // \documentclass is normally very near the top
          modified_content: modified_content.slice(0, 1024),
          filename
        },
        'injected draft class'
      )
      return fs.writeFile(filename, modified_content, callback)
    })
  },

  _injectDraftOption(content) {
    return (
      content
        // With existing options (must be first, otherwise both are applied)
        .replace(/\\documentclass\[/g, '\\documentclass[draft,')
        // Without existing options
        .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
    )
  }
}
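A quick illustration of the two regex cases in _injectDraftOption (inputs are illustrative):

    const DraftModeManager = require('./app/js/DraftModeManager') // path assumed

    console.log(DraftModeManager._injectDraftOption('\\documentclass{article}'))
    // => \documentclass[draft]{article}
    console.log(DraftModeManager._injectDraftOption('\\documentclass[12pt]{article}'))
    // => \documentclass[draft,12pt]{article}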
36  app/js/Errors.js  Normal file
@@ -0,0 +1,36 @@
/* eslint-disable
    no-proto,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function (message) {
  const error = new Error(message)
  error.name = 'NotFoundError'
  error.__proto__ = NotFoundError.prototype
  return error
}
NotFoundError.prototype.__proto__ = Error.prototype

var FilesOutOfSyncError = function (message) {
  const error = new Error(message)
  error.name = 'FilesOutOfSyncError'
  error.__proto__ = FilesOutOfSyncError.prototype
  return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype

var AlreadyCompilingError = function (message) {
  const error = new Error(message)
  error.name = 'AlreadyCompilingError'
  error.__proto__ = AlreadyCompilingError.prototype
  return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype

module.exports = Errors = {
  NotFoundError,
  FilesOutOfSyncError,
  AlreadyCompilingError
}
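These factory-style errors keep instanceof checks working through the prototype chain, even though they are called without new; a small check (illustrative):

    const Errors = require('./app/js/Errors') // path assumed

    const err = Errors.NotFoundError('output.pdf not found')
    console.log(err instanceof Errors.NotFoundError) // true, via the __proto__ assignment
    console.log(err instanceof Error)                // true
    console.log(err.name)                            // 'NotFoundError'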
243  app/js/LatexRunner.js  Normal file
@@ -0,0 +1,243 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
const fs = require('fs')

const ProcessTable = {} // table of currently running jobs (pids or docker container names)

module.exports = LatexRunner = {
  runLatex(project_id, options, callback) {
    let command
    if (callback == null) {
      callback = function (error) {}
    }
    let {
      directory,
      mainFile,
      compiler,
      timeout,
      image,
      environment,
      flags,
      compileGroup
    } = options
    if (!compiler) {
      compiler = 'pdflatex'
    }
    if (!timeout) {
      timeout = 60000 // milliseconds
    }

    logger.log(
      {
        directory,
        compiler,
        timeout,
        mainFile,
        environment,
        flags,
        compileGroup
      },
      'starting compile'
    )

    // We want to run latexmk on the tex file which we will automatically
    // generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')

    if (compiler === 'pdflatex') {
      command = LatexRunner._pdflatexCommand(mainFile, flags)
    } else if (compiler === 'latex') {
      command = LatexRunner._latexCommand(mainFile, flags)
    } else if (compiler === 'xelatex') {
      command = LatexRunner._xelatexCommand(mainFile, flags)
    } else if (compiler === 'lualatex') {
      command = LatexRunner._lualatexCommand(mainFile, flags)
    } else {
      return callback(new Error(`unknown compiler: ${compiler}`))
    }

    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
      command = ['strace', '-o', 'strace', '-ff'].concat(command)
    }

    const id = `${project_id}` // record running project under this id

    return (ProcessTable[id] = CommandRunner.run(
      project_id,
      command,
      directory,
      image,
      timeout,
      environment,
      compileGroup,
      function (error, output) {
        delete ProcessTable[id]
        if (error != null) {
          return callback(error)
        }
        const runs =
          __guard__(
            __guard__(output != null ? output.stderr : undefined, (x1) =>
              x1.match(/^Run number \d+ of .*latex/gm)
            ),
            (x) => x.length
          ) || 0
        const failed =
          __guard__(output != null ? output.stdout : undefined, (x2) =>
            x2.match(/^Latexmk: Errors/m)
          ) != null
            ? 1
            : 0
        // counters from latexmk output
        const stats = {}
        stats['latexmk-errors'] = failed
        stats['latex-runs'] = runs
        stats['latex-runs-with-errors'] = failed ? runs : 0
        stats[`latex-runs-${runs}`] = 1
        stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
        // timing information from /usr/bin/time
        const timings = {}
        const stderr = output != null ? output.stderr : undefined
        timings['cpu-percent'] =
          __guard__(
            stderr != null
              ? stderr.match(/Percent of CPU this job got: (\d+)/m)
              : undefined,
            (x3) => x3[1]
          ) || 0
        timings['cpu-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/User time.*: (\d+.\d+)/m)
              : undefined,
            (x4) => x4[1]
          ) || 0
        timings['sys-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/System time.*: (\d+.\d+)/m)
              : undefined,
            (x5) => x5[1]
          ) || 0
        // record output files
        LatexRunner.writeLogOutput(project_id, directory, output, () => {
          return callback(error, output, stats, timings)
        })
      }
    ))
  },

  writeLogOutput(project_id, directory, output, callback) {
    if (!output) {
      return callback()
    }
    // internal method for writing non-empty log files
    function _writeFile(file, content, cb) {
      if (content && content.length > 0) {
        fs.writeFile(file, content, (err) => {
          if (err) {
            logger.error({ project_id, file }, 'error writing log file') // don't fail on error
          }
          cb()
        })
      } else {
        cb()
      }
    }
    // write stdout and stderr, ignoring errors
    _writeFile(Path.join(directory, 'output.stdout'), output.stdout, () => {
      _writeFile(Path.join(directory, 'output.stderr'), output.stderr, () => {
        callback()
      })
    })
  },

  killLatex(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const id = `${project_id}`
    logger.log({ id }, 'killing running compile')
    if (ProcessTable[id] == null) {
      logger.warn({ id }, 'no such project to kill')
      return callback(null)
    } else {
      return CommandRunner.kill(ProcessTable[id], callback)
    }
  },

  _latexmkBaseCommand(flags) {
    let args = [
      'latexmk',
      '-cd',
      '-f',
      '-jobname=output',
      '-auxdir=$COMPILE_DIR',
      '-outdir=$COMPILE_DIR',
      '-synctex=1',
      '-interaction=batchmode'
    ]
    if (flags) {
      args = args.concat(flags)
    }
    return (
      __guard__(
        Settings != null ? Settings.clsi : undefined,
        (x) => x.latexmkCommandPrefix
      ) || []
    ).concat(args)
  },

  _pdflatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdf',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _latexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdfdvi',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _xelatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-xelatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _lualatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-lualatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
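For review, a sketch of the command _pdflatexCommand produces (assuming no latexmkCommandPrefix is configured; the main file name is illustrative):

    const LatexRunner = require('./app/js/LatexRunner') // path assumed

    console.log(LatexRunner._pdflatexCommand('main.tex'))
    // => ['latexmk', '-cd', '-f', '-jobname=output', '-auxdir=$COMPILE_DIR',
    //     '-outdir=$COMPILE_DIR', '-synctex=1', '-interaction=batchmode',
    //     '-pdf', '$COMPILE_DIR/main.tex']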
103  app/js/LocalCommandRunner.js  Normal file
@@ -0,0 +1,103 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CommandRunner
const { spawn } = require('child_process')
const _ = require('underscore')
const logger = require('logger-sharelatex')

logger.info('using standard command runner')

module.exports = CommandRunner = {
  run(
    project_id,
    command,
    directory,
    image,
    timeout,
    environment,
    compileGroup,
    callback
  ) {
    let key, value
    if (callback == null) {
      callback = function (error) {}
    } else {
      callback = _.once(callback)
    }
    command = Array.from(command).map((arg) =>
      arg.toString().replace('$COMPILE_DIR', directory)
    )
    logger.log({ project_id, command, directory }, 'running command')
    logger.warn('timeouts and sandboxing are not enabled with CommandRunner')

    // merge environment settings
    const env = {}
    for (key in process.env) {
      value = process.env[key]
      env[key] = value
    }
    for (key in environment) {
      value = environment[key]
      env[key] = value
    }

    // run the command as a detached process so it has its own process group
    // (required for the process.kill(-pid) call in kill() below)
    const proc = spawn(command[0], command.slice(1), {
      cwd: directory,
      env,
      detached: true
    })

    let stdout = ''
    proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))

    proc.on('error', function (err) {
      logger.err(
        { err, project_id, command, directory },
        'error running command'
      )
      return callback(err)
    })

    proc.on('close', function (code, signal) {
      let err
      logger.info({ code, signal, project_id }, 'command exited')
      if (signal === 'SIGTERM') {
        // signal from the kill method below
        err = new Error('terminated')
        err.terminated = true
        return callback(err)
      } else if (code === 1) {
        // exit status from chktex
        err = new Error('exited')
        err.code = code
        return callback(err)
      } else {
        return callback(null, { stdout: stdout })
      }
    })

    return proc.pid // return the process id so the job can be killed if necessary
  },

  kill(pid, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    try {
      process.kill(-pid) // kill all processes in the group
    } catch (err) {
      return callback(err)
    }
    return callback()
  }
}
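A hedged sketch of driving the local runner directly (project id, directory and command are illustrative; arguments the local runner ignores are passed as null):

    const CommandRunner = require('./app/js/LocalCommandRunner') // path assumed

    const pid = CommandRunner.run(
      'project-id',                      // illustrative
      ['echo', 'compiled $COMPILE_DIR'],
      '/tmp',                            // substituted for $COMPILE_DIR
      null,                              // image: unused by the local runner
      10 * 1000,                         // timeout: logged as unsupported
      {},                                // extra environment
      'standard',
      (error, output) => {
        if (error) { return console.error(error) }
        console.log(output.stdout)       // 'compiled /tmp\n'
      }
    )
    // pid can be handed to CommandRunner.kill(pid, cb) to stop the whole process group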
72  app/js/LockManager.js  Normal file
@@ -0,0 +1,72 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')
module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 1000, // 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000, // 15s maximum time to spend trying to get the lock
  LOCK_STALE: 5 * 60 * 1000, // 5 mins until the lock auto expires

  runWithLock(path, runner, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const lockOpts = {
      wait: this.MAX_LOCK_WAIT_TIME,
      pollPeriod: this.LOCK_TEST_INTERVAL,
      stale: this.LOCK_STALE
    }
    return Lockfile.lock(path, lockOpts, function (error) {
      if ((error != null ? error.code : undefined) === 'EEXIST') {
        return callback(new Errors.AlreadyCompilingError('compile in progress'))
      } else if (error != null) {
        return fs.lstat(path, (statLockErr, statLock) =>
          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
            fs.readdir(Path.dirname(path), function (readdirErr, readdirDir) {
              logger.err(
                {
                  error,
                  path,
                  statLock,
                  statLockErr,
                  statDir,
                  statDirErr,
                  readdirErr,
                  readdirDir
                },
                'unable to get lock'
              )
              return callback(error)
            })
          )
        )
      } else {
        return runner((error1, ...args) =>
          Lockfile.unlock(path, function (error2) {
            error = error1 || error2
            if (error != null) {
              return callback(error)
            }
            return callback(null, ...Array.from(args))
          })
        )
      }
    })
  }
}
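The same runner/release pattern as DockerLockManager above, but backed by a lock file on disk, so it also guards against concurrent compiles across processes; a minimal sketch (the lock path is illustrative):

    const LockManager = require('./app/js/LockManager') // path assumed

    LockManager.runWithLock('/var/lib/clsi/compiles/project-id/.lock', (releaseLock) => {
      // ... run the compile while holding the lock file ...
      releaseLock(null)
    }, (error) => {
      if (error && error.name === 'AlreadyCompilingError') {
        console.log('another compile is already in progress')
      }
    })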
3  app/js/Metrics.js  Normal file
@@ -0,0 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')
402  app/js/OutputCacheManager.js  Normal file
@@ -0,0 +1,402 @@
/* eslint-disable
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS104: Avoid inline assignments
 * DS204: Change includes calls to have a more natural evaluation order
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputCacheManager
const async = require('async')
const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('lodash')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')

const OutputFileOptimiser = require('./OutputFileOptimiser')

module.exports = OutputCacheManager = {
  CACHE_SUBDIR: '.cache/clsi',
  ARCHIVE_SUBDIR: '.archive/clsi',
  // build id is HEXDATE-HEXRANDOM from Date.now() and random bytes
  // for backwards compatibility, make the random bytes part optional
  BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
  CACHE_LIMIT: 2, // maximum number of cache directories
  CACHE_AGE: 60 * 60 * 1000, // up to one hour old

  path(buildId, file) {
    // used by the static server: given a build id, return '.cache/clsi/buildId'
    if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
      return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
    } else {
      // for an invalid build id, return the top level
      return file
    }
  },

  generateBuildId(callback) {
    // generate a secure build id from Date.now() and 8 random bytes in hex
    if (callback == null) {
      callback = function (error, buildId) {}
    }
    return crypto.randomBytes(8, function (err, buf) {
      if (err != null) {
        return callback(err)
      }
      const random = buf.toString('hex')
      const date = Date.now().toString(16)
      return callback(err, `${date}-${random}`)
    })
  },

  saveOutputFiles(outputFiles, compileDir, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return OutputCacheManager.generateBuildId(function (err, buildId) {
      if (err != null) {
        return callback(err)
      }
      return OutputCacheManager.saveOutputFilesInBuildDir(
        outputFiles,
        compileDir,
        buildId,
        callback
      )
    })
  },

  saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
    // make a compileDir/CACHE_SUBDIR/build_id directory and
    // copy all the output files into it
    if (callback == null) {
      callback = function (error) {}
    }
    const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
    // Put the files into a new cache subdirectory
    const cacheDir = Path.join(
      compileDir,
      OutputCacheManager.CACHE_SUBDIR,
      buildId
    )
    // Is it a per-user compile? check if the compile directory is PROJECTID-USERID
    const perUser = Path.basename(compileDir).match(
      /^[0-9a-f]{24}-[0-9a-f]{24}$/
    )

    // Archive logs in the background
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
      (Settings.clsi != null ? Settings.clsi.strace : undefined)
    ) {
      OutputCacheManager.archiveLogs(
        outputFiles,
        compileDir,
        buildId,
        function (err) {
          if (err != null) {
            return logger.warn({ err }, 'error archiving log files')
          }
        }
      )
    }

    // make the new cache directory
    return fse.ensureDir(cacheDir, function (err) {
      if (err != null) {
        logger.error(
          { err, directory: cacheDir },
          'error creating cache directory'
        )
        return callback(err, outputFiles)
      } else {
        // copy all the output files into the new cache directory
        const results = []
        return async.mapSeries(
          outputFiles,
          function (file, cb) {
            // don't send dot files as output, express doesn't serve them
            if (OutputCacheManager._fileIsHidden(file.path)) {
              logger.debug(
                { compileDir, path: file.path },
                'ignoring dotfile in output'
              )
              return cb()
            }
            // copy other files into the cache directory if valid
            const newFile = _.clone(file)
            const [src, dst] = Array.from([
              Path.join(compileDir, file.path),
              Path.join(cacheDir, file.path)
            ])
            return OutputCacheManager._checkFileIsSafe(src, function (
              err,
              isSafe
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!isSafe) {
                return cb()
              }
              return OutputCacheManager._checkIfShouldCopy(src, function (
                err,
                shouldCopy
              ) {
                if (err != null) {
                  return cb(err)
                }
                if (!shouldCopy) {
                  return cb()
                }
                return OutputCacheManager._copyFile(src, dst, function (err) {
                  if (err != null) {
                    return cb(err)
                  }
                  newFile.build = buildId // attach a build id if we cached the file
                  results.push(newFile)
                  return cb()
                })
              })
            })
          },
          function (err) {
            if (err != null) {
              // pass back the original files if we encountered *any* error
              callback(err, outputFiles)
              // clean up the directory we just created
              return fse.remove(cacheDir, function (err) {
                if (err != null) {
                  return logger.error(
                    { err, dir: cacheDir },
                    'error removing cache dir after failure'
                  )
                }
              })
            } else {
              // pass back the list of new files in the cache
              callback(err, results)
              // let file expiry run in the background, expire all previous files if per-user
              return OutputCacheManager.expireOutputFiles(cacheRoot, {
                keep: buildId,
                limit: perUser ? 1 : null
              })
            }
          }
        )
      }
    })
  },

  archiveLogs(outputFiles, compileDir, buildId, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const archiveDir = Path.join(
      compileDir,
      OutputCacheManager.ARCHIVE_SUBDIR,
      buildId
    )
    logger.log({ dir: archiveDir }, 'archiving log files for project')
    return fse.ensureDir(archiveDir, function (err) {
      if (err != null) {
        return callback(err)
      }
      return async.mapSeries(
        outputFiles,
        function (file, cb) {
          const [src, dst] = Array.from([
            Path.join(compileDir, file.path),
            Path.join(archiveDir, file.path)
          ])
          return OutputCacheManager._checkFileIsSafe(src, function (
            err,
            isSafe
          ) {
            if (err != null) {
              return cb(err)
            }
            if (!isSafe) {
              return cb()
            }
            return OutputCacheManager._checkIfShouldArchive(src, function (
              err,
              shouldArchive
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!shouldArchive) {
                return cb()
              }
              return OutputCacheManager._copyFile(src, dst, cb)
            })
          })
        },
        callback
      )
    })
  },

  expireOutputFiles(cacheRoot, options, callback) {
    // look in compileDir for build dirs and delete if > N or age of mod time > T
    if (callback == null) {
      callback = function (error) {}
    }
    return fs.readdir(cacheRoot, function (err, results) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return callback(null)
        } // cache directory is empty
        logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
        return callback(err)
      }

      const dirs = results.sort().reverse()
      const currentTime = Date.now()

      const isExpired = function (dir, index) {
        if ((options != null ? options.keep : undefined) === dir) {
          return false
        }
        // remove any directories over the requested (non-null) limit
        if (
          (options != null ? options.limit : undefined) != null &&
          index > options.limit
        ) {
          return true
        }
        // remove any directories over the hard limit
        if (index > OutputCacheManager.CACHE_LIMIT) {
          return true
        }
        // we can get the build time from the first part of the directory name DDDD-RRRR
        // where DDDD is the date and RRRR is random bytes
        const dirTime = parseInt(
          __guard__(dir.split('-'), (x) => x[0]),
          16
        )
        const age = currentTime - dirTime
        return age > OutputCacheManager.CACHE_AGE
      }

      const toRemove = _.filter(dirs, isExpired)

      const removeDir = (dir, cb) =>
        fse.remove(Path.join(cacheRoot, dir), function (err, result) {
          logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
          if (err != null) {
            logger.error({ err, dir }, 'cache remove error')
          }
          return cb(err, result)
        })
      return async.eachSeries(
        toRemove,
        (dir, cb) => removeDir(dir, cb),
        callback
      )
    })
  },

  _fileIsHidden(path) {
    return (path != null ? path.match(/^\.|\/\./) : undefined) != null
  },

  _checkFileIsSafe(src, callback) {
    // check if we have a valid file to copy into the cache
    if (callback == null) {
      callback = function (error, isSafe) {}
    }
    return fs.stat(src, function (err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared before copying to build cache'
        )
        return callback(err, false)
|
} else if (err != null) {
|
||||||
|
// some other problem reading the file
|
||||||
|
logger.error({ err, file: src }, 'stat error for file in cache')
|
||||||
|
return callback(err, false)
|
||||||
|
} else if (!stats.isFile()) {
|
||||||
|
// other filetype - reject it
|
||||||
|
logger.warn(
|
||||||
|
{ src, stat: stats },
|
||||||
|
'nonfile output - refusing to copy to cache'
|
||||||
|
)
|
||||||
|
return callback(null, false)
|
||||||
|
} else {
|
||||||
|
// it's a plain file, ok to copy
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_copyFile(src, dst, callback) {
|
||||||
|
// copy output file into the cache
|
||||||
|
return fse.copy(src, dst, function (err) {
|
||||||
|
if ((err != null ? err.code : undefined) === 'ENOENT') {
|
||||||
|
logger.warn(
|
||||||
|
{ err, file: src },
|
||||||
|
'file has disappeared when copying to build cache'
|
||||||
|
)
|
||||||
|
return callback(err, false)
|
||||||
|
} else if (err != null) {
|
||||||
|
logger.error({ err, src, dst }, 'copy error for file in cache')
|
||||||
|
return callback(err)
|
||||||
|
} else {
|
||||||
|
if (
|
||||||
|
Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
|
||||||
|
) {
|
||||||
|
// don't run any optimisations on the pdf when they are done
|
||||||
|
// in the docker container
|
||||||
|
return callback()
|
||||||
|
} else {
|
||||||
|
// call the optimiser for the file too
|
||||||
|
return OutputFileOptimiser.optimiseFile(src, dst, callback)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkIfShouldCopy(src, callback) {
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, shouldCopy) {}
|
||||||
|
}
|
||||||
|
return callback(null, !Path.basename(src).match(/^strace/))
|
||||||
|
},
|
||||||
|
|
||||||
|
_checkIfShouldArchive(src, callback) {
|
||||||
|
let needle
|
||||||
|
if (callback == null) {
|
||||||
|
callback = function (err, shouldCopy) {}
|
||||||
|
}
|
||||||
|
if (Path.basename(src).match(/^strace/)) {
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
|
||||||
|
((needle = Path.basename(src)),
|
||||||
|
['output.log', 'output.blg'].includes(needle))
|
||||||
|
) {
|
||||||
|
return callback(null, true)
|
||||||
|
}
|
||||||
|
return callback(null, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function __guard__(value, transform) {
|
||||||
|
return typeof value !== 'undefined' && value !== null
|
||||||
|
? transform(value)
|
||||||
|
: undefined
|
||||||
|
}
|
||||||
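For reference, the expiry logic above derives a build's age from the first component of its directory name, so cache keys double as timestamps. A minimal sketch of that scheme, assuming hex-millisecond timestamps plus random bytes as the parsing in expireOutputFiles implies; generateBuildId is a hypothetical name, not part of this diff:

const crypto = require('crypto')

function generateBuildId() {
  // hex ms timestamp (the DDDD part), then random bytes (the RRRR part)
  return `${Date.now().toString(16)}-${crypto.randomBytes(8).toString('hex')}`
}

function buildIdAgeMs(buildId) {
  // mirrors the parseInt(dir.split('-')[0], 16) call in expireOutputFiles
  return Date.now() - parseInt(buildId.split('-')[0], 16)
}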
115
app/js/OutputFileFinder.js
Normal file
@@ -0,0 +1,115 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

module.exports = OutputFileFinder = {
  findOutputFiles(resources, directory, callback) {
    if (callback == null) {
      callback = function (error, outputFiles, allFiles) {}
    }
    const incomingResources = {}
    for (const resource of Array.from(resources)) {
      incomingResources[resource.path] = true
    }

    return OutputFileFinder._getAllFiles(directory, function (error, allFiles) {
      if (allFiles == null) {
        allFiles = []
      }
      if (error != null) {
        logger.err({ err: error }, 'error finding all output files')
        return callback(error)
      }
      const outputFiles = []
      for (const file of Array.from(allFiles)) {
        if (!incomingResources[file]) {
          outputFiles.push({
            path: file,
            type: __guard__(file.match(/\.([^\.]+)$/), (x) => x[1])
          })
        }
      }
      return callback(null, outputFiles, allFiles)
    })
  },

  _getAllFiles(directory, _callback) {
    if (_callback == null) {
      _callback = function (error, fileList) {}
    }
    const callback = function (error, fileList) {
      _callback(error, fileList)
      return (_callback = function () {})
    }

    // don't include clsi-specific files/directories in the output list
    const EXCLUDE_DIRS = [
      '-name',
      '.cache',
      '-o',
      '-name',
      '.archive',
      '-o',
      '-name',
      '.project-*'
    ]
    const args = [
      directory,
      '(',
      ...Array.from(EXCLUDE_DIRS),
      ')',
      '-prune',
      '-o',
      '-type',
      'f',
      '-print'
    ]
    logger.log({ args }, 'running find command')

    const proc = spawn('find', args)
    let stdout = ''
    proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
    proc.on('error', callback)
    return proc.on('close', function (code) {
      if (code !== 0) {
        logger.warn(
          { directory, code },
          "find returned error, directory likely doesn't exist"
        )
        return callback(null, [])
      }
      let fileList = stdout.trim().split('\n')
      fileList = fileList.map(function (file) {
        // Strip leading directory
        let path
        return (path = Path.relative(directory, file))
      })
      return callback(null, fileList)
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
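A hedged usage sketch of the API above: findOutputFiles returns everything on disk that was not one of the uploaded input resources. The compile directory and resource paths are illustrative only.

const OutputFileFinder = require('./OutputFileFinder')

const resources = [{ path: 'main.tex' }, { path: 'figures/plot.eps' }]
OutputFileFinder.findOutputFiles(resources, '/compile/abc123', function (error, outputFiles, allFiles) {
  // outputFiles might contain e.g. { path: 'output.pdf', type: 'pdf' };
  // allFiles also includes the input resources found on disk
})

Internally this spawns the equivalent of: find /compile/abc123 ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print, then makes each result relative to the compile directory.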
103
app/js/OutputFileOptimiser.js
Normal file
@@ -0,0 +1,103 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-undef,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('lodash')

module.exports = OutputFileOptimiser = {
  optimiseFile(src, dst, callback) {
    // check output file (src) and see if we can optimise it, storing
    // the result in the build directory (dst)
    if (callback == null) {
      callback = function (error) {}
    }
    if (src.match(/\/output\.pdf$/)) {
      return OutputFileOptimiser.checkIfPDFIsOptimised(src, function (err, isOptimised) {
        if (err != null || isOptimised) {
          return callback(null)
        }
        return OutputFileOptimiser.optimisePDF(src, dst, callback)
      })
    } else {
      return callback(null)
    }
  },

  checkIfPDFIsOptimised(file, callback) {
    const SIZE = 16 * 1024 // check the header of the pdf
    const result = Buffer.alloc(SIZE) // fills with zeroes by default
    return fs.open(file, 'r', function (err, fd) {
      if (err != null) {
        return callback(err)
      }
      return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
        fs.close(fd, function (errClose) {
          if (errRead != null) {
            return callback(errRead)
          }
          // surface errors from closing the file descriptor
          if (errClose != null) {
            return callback(errClose)
          }
          const isOptimised =
            buffer.toString('ascii').indexOf('/Linearized 1') >= 0
          return callback(null, isOptimised)
        })
      )
    })
  },

  optimisePDF(src, dst, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const tmpOutput = dst + '.opt'
    const args = ['--linearize', src, tmpOutput]
    logger.log({ args }, 'running qpdf command')

    const timer = new Metrics.Timer('qpdf')
    const proc = spawn('qpdf', args)
    let stdout = ''
    proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
    callback = _.once(callback) // avoid double call back for error and close event
    proc.on('error', function (err) {
      logger.warn({ err, args }, 'qpdf failed')
      return callback(null) // ignore the error
    })
    return proc.on('close', function (code) {
      timer.done()
      if (code !== 0) {
        logger.warn({ code, args }, 'qpdf returned error')
        return callback(null) // ignore the error
      }
      return fs.rename(tmpOutput, dst, function (err) {
        if (err != null) {
          logger.warn(
            { tmpOutput, dst },
            'failed to rename output of qpdf command'
          )
        }
        return callback(null)
      })
    })
  }
}
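A brief usage sketch of the optimiser above: it only touches files whose path ends in /output.pdf, and it is a no-op when the first 16KB of the PDF already contain the '/Linearized 1' marker that qpdf writes into linearized (fast-web-view) files. The paths below are illustrative.

const OutputFileOptimiser = require('./OutputFileOptimiser')

OutputFileOptimiser.optimiseFile(
  '/compile/abc123/output.pdf', // source in the compile directory
  '/cache/abc123/output.pdf',   // destination in the build cache
  function (error) {
    // qpdf spawn/exit failures are deliberately swallowed above, so in
    // practice error is only set by earlier fs problems
  }
)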
185
app/js/ProjectPersistenceManager.js
Normal file
@@ -0,0 +1,185 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')
const diskusage = require('diskusage')

module.exports = ProjectPersistenceManager = {
  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

  refreshExpiryTimeout(callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    diskusage.check('/', function (err, stats) {
      if (err) {
        logger.err({ err: err }, 'error getting disk usage')
        return callback(err)
      }
      const lowDisk = stats.available / stats.total < 0.1
      const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9
      if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) {
        logger.warn(
          { stats: stats },
          'disk running low on space, modifying EXPIRY_TIMEOUT'
        )
        ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry
      }
      callback()
    })
  },

  markProjectAsJustAccessed(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const job = (cb) =>
      db.Project.findOrCreate({ where: { project_id } })
        .spread((project, created) =>
          project
            .update({ lastAccessed: new Date() })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  clearExpiredProjects(callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return ProjectPersistenceManager._findExpiredProjectIds(function (error, project_ids) {
      if (error != null) {
        return callback(error)
      }
      logger.log({ project_ids }, 'clearing expired projects')
      const jobs = Array.from(project_ids || []).map((project_id) =>
        ((project_id) => (callback) =>
          ProjectPersistenceManager.clearProjectFromCache(project_id, function (err) {
            if (err != null) {
              logger.error({ err, project_id }, 'error clearing project')
            }
            return callback()
          }))(project_id)
      )
      return async.series(jobs, function (error) {
        if (error != null) {
          return callback(error)
        }
        return CompileManager.clearExpiredProjects(
          ProjectPersistenceManager.EXPIRY_TIMEOUT,
          (error) => callback()
        )
      })
    })
  }, // ignore any errors from deleting directories

  clearProject(project_id, user_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    logger.log({ project_id, user_id }, 'clearing project for user')
    return CompileManager.clearProject(project_id, user_id, function (error) {
      if (error != null) {
        return callback(error)
      }
      return ProjectPersistenceManager.clearProjectFromCache(
        project_id,
        function (error) {
          if (error != null) {
            return callback(error)
          }
          return callback()
        }
      )
    })
  },

  clearProjectFromCache(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    logger.log({ project_id }, 'clearing project from cache')
    return UrlCache.clearProject(project_id, function (error) {
      if (error != null) {
        logger.err({ error, project_id }, 'error clearing project from cache')
        return callback(error)
      }
      return ProjectPersistenceManager._clearProjectFromDatabase(
        project_id,
        function (error) {
          if (error != null) {
            logger.err(
              { error, project_id },
              'error clearing project from database'
            )
          }
          return callback(error)
        }
      )
    })
  },

  _clearProjectFromDatabase(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    logger.log({ project_id }, 'clearing project from database')
    const job = (cb) =>
      db.Project.destroy({ where: { project_id } })
        .then(() => cb())
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findExpiredProjectIds(callback) {
    if (callback == null) {
      callback = function (error, project_ids) {}
    }
    const job = function (cb) {
      const keepProjectsFrom = new Date(
        Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
      )
      const q = {}
      q[db.op.lt] = keepProjectsFrom
      return db.Project.findAll({ where: { lastAccessed: q } })
        .then((projects) =>
          cb(
            null,
            projects.map((project) => project.project_id)
          )
        )
        .error(cb)
    }

    return dbQueue.queue.push(job, callback)
  }
}

logger.log(
  { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
  'project assets kept timeout'
)
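A worked sketch of the low-disk backoff in refreshExpiryTimeout above, assuming Settings.project_cache_length_ms is configured (note that without it, `undefined / 2` is NaN and the guard never fires, so the timeout only shrinks when the setting is present): each check that finds under 10% free disk multiplies the timeout by 0.9, until it would fall below half the configured cache length.

const oneDay = 24 * 60 * 60 * 1000
const configured = oneDay * 2.5 // illustrative project_cache_length_ms
let timeout = configured
// 2.5 days -> 2.25 -> 2.025 -> ... bottoming out just above configured / 2
while (configured / 2 < timeout * 0.9) {
  timeout = timeout * 0.9
}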
233
app/js/RequestParser.js
Normal file
@@ -0,0 +1,233 @@
/* eslint-disable
    handle-callback-err,
    no-control-regex,
    no-throw-literal,
    no-unused-vars,
    no-useless-escape,
    standard/no-callback-literal,
    valid-typeof,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS205: Consider reworking code to avoid use of IIFEs
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RequestParser
const settings = require('settings-sharelatex')

module.exports = RequestParser = {
  VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
  MAX_TIMEOUT: 600,

  parse(body, callback) {
    let resource
    if (callback == null) {
      callback = function (error, data) {}
    }
    const response = {}

    if (body.compile == null) {
      return callback('top level object should have a compile attribute')
    }

    const { compile } = body
    if (!compile.options) {
      compile.options = {}
    }

    try {
      response.compiler = this._parseAttribute(
        'compiler',
        compile.options.compiler,
        {
          validValues: this.VALID_COMPILERS,
          default: 'pdflatex',
          type: 'string'
        }
      )
      response.timeout = this._parseAttribute(
        'timeout',
        compile.options.timeout,
        {
          default: RequestParser.MAX_TIMEOUT,
          type: 'number'
        }
      )
      response.imageName = this._parseAttribute(
        'imageName',
        compile.options.imageName,
        {
          type: 'string',
          validValues:
            settings.clsi &&
            settings.clsi.docker &&
            settings.clsi.docker.allowedImages
        }
      )
      response.draft = this._parseAttribute('draft', compile.options.draft, {
        default: false,
        type: 'boolean'
      })
      response.check = this._parseAttribute('check', compile.options.check, {
        type: 'string'
      })
      response.flags = this._parseAttribute('flags', compile.options.flags, {
        default: [],
        type: 'object'
      })
      if (settings.allowedCompileGroups) {
        response.compileGroup = this._parseAttribute(
          'compileGroup',
          compile.options.compileGroup,
          {
            validValues: settings.allowedCompileGroups,
            default: '',
            type: 'string'
          }
        )
      }
      // The syncType specifies whether the request contains all
      // resources (full) or only those resources to be updated
      // in-place (incremental).
      response.syncType = this._parseAttribute(
        'syncType',
        compile.options.syncType,
        {
          validValues: ['full', 'incremental'],
          type: 'string'
        }
      )

      // The syncState is an identifier passed in with the request
      // which has the property that it changes when any resource is
      // added, deleted, moved or renamed.
      //
      // on syncType full the syncState identifier is passed in and
      // stored
      //
      // on syncType incremental the syncState identifier must match
      // the stored value
      response.syncState = this._parseAttribute(
        'syncState',
        compile.options.syncState,
        { type: 'string' }
      )

      if (response.timeout > RequestParser.MAX_TIMEOUT) {
        response.timeout = RequestParser.MAX_TIMEOUT
      }
      response.timeout = response.timeout * 1000 // milliseconds

      response.resources = (() => {
        const result = []
        for (resource of Array.from(compile.resources || [])) {
          result.push(this._parseResource(resource))
        }
        return result
      })()

      const rootResourcePath = this._parseAttribute(
        'rootResourcePath',
        compile.rootResourcePath,
        {
          default: 'main.tex',
          type: 'string'
        }
      )
      const originalRootResourcePath = rootResourcePath
      const sanitizedRootResourcePath = RequestParser._sanitizePath(
        rootResourcePath
      )
      response.rootResourcePath = RequestParser._checkPath(
        sanitizedRootResourcePath
      )

      for (resource of Array.from(response.resources)) {
        if (resource.path === originalRootResourcePath) {
          resource.path = sanitizedRootResourcePath
        }
      }
    } catch (error1) {
      const error = error1
      return callback(error)
    }

    return callback(null, response)
  },

  _parseResource(resource) {
    let modified
    if (resource.path == null || typeof resource.path !== 'string') {
      throw 'all resources should have a path attribute'
    }

    if (resource.modified != null) {
      modified = new Date(resource.modified)
      if (isNaN(modified.getTime())) {
        throw `resource modified date could not be understood: ${resource.modified}`
      }
    }

    if (resource.url == null && resource.content == null) {
      throw 'all resources should have either a url or content attribute'
    }
    if (resource.content != null && typeof resource.content !== 'string') {
      throw 'content attribute should be a string'
    }
    if (resource.url != null && typeof resource.url !== 'string') {
      throw 'url attribute should be a string'
    }

    return {
      path: resource.path,
      modified,
      url: resource.url,
      content: resource.content
    }
  },

  _parseAttribute(name, attribute, options) {
    if (attribute != null) {
      if (options.validValues != null) {
        if (options.validValues.indexOf(attribute) === -1) {
          throw `${name} attribute should be one of: ${options.validValues.join(
            ', '
          )}`
        }
      }
      if (options.type != null) {
        if (typeof attribute !== options.type) {
          throw `${name} attribute should be a ${options.type}`
        }
      }
    } else {
      if (options.default != null) {
        return options.default
      }
    }
    return attribute
  },

  _sanitizePath(path) {
    // See http://php.net/manual/en/function.escapeshellcmd.php
    return path.replace(
      /[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
      ''
    )
  },

  _checkPath(path) {
    // check that the request does not use a relative path
    for (const dir of Array.from(path.split('/'))) {
      if (dir === '..') {
        throw 'relative path in root resource'
      }
    }
    return path
  }
}
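An illustrative request body accepted by RequestParser.parse above; every field maps onto one of the _parseAttribute calls, and the values are examples, not defaults from any real deployment:

const exampleBody = {
  compile: {
    options: {
      compiler: 'pdflatex',    // must be one of VALID_COMPILERS
      timeout: 120,            // seconds; capped at MAX_TIMEOUT, then converted to ms
      syncType: 'incremental', // or 'full'
      syncState: 'abc123'
    },
    rootResourcePath: 'main.tex',
    resources: [
      { path: 'main.tex', content: '\\documentclass{article}...' },
      {
        path: 'logo.png',
        url: 'http://filestore/project/logo.png',
        modified: 1500000000000
      }
    ]
  }
}
RequestParser.parse(exampleBody, function (error, request) {
  // request.timeout === 120000, request.rootResourcePath === 'main.tex'
})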
156
app/js/ResourceStateManager.js
Normal file
@@ -0,0 +1,156 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS201: Simplify complex destructure assignments
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')

module.exports = ResourceStateManager = {
  // The sync state is an identifier which must match for an
  // incremental update to be allowed.
  //
  // The initial value is passed in and stored on a full
  // compile, along with the list of resources.
  //
  // Subsequent incremental compiles must come with the same value - if
  // not they will be rejected with a 409 Conflict response. The
  // previous list of resources is returned.
  //
  // An incremental compile can only update existing files with new
  // content. The sync state identifier must change if any docs or
  // files are moved, added, deleted or renamed.

  SYNC_STATE_FILE: '.project-sync-state',
  SYNC_STATE_MAX_SIZE: 128 * 1024,

  saveProjectState(state, resources, basePath, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    if (state == null) {
      // remove the file if no state passed in
      logger.log({ state, basePath }, 'clearing sync state')
      return fs.unlink(stateFile, function (err) {
        if (err != null && err.code !== 'ENOENT') {
          return callback(err)
        } else {
          return callback()
        }
      })
    } else {
      logger.log({ state, basePath }, 'writing sync state')
      const resourceList = Array.from(resources).map(
        (resource) => resource.path
      )
      return fs.writeFile(
        stateFile,
        [...Array.from(resourceList), `stateHash:${state}`].join('\n'),
        callback
      )
    }
  },

  checkProjectStateMatches(state, basePath, callback) {
    if (callback == null) {
      callback = function (error, resources) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    const size = this.SYNC_STATE_MAX_SIZE
    return SafeReader.readFile(stateFile, size, 'utf8', function (err, result, bytesRead) {
      if (err != null) {
        return callback(err)
      }
      if (bytesRead === size) {
        logger.error(
          { file: stateFile, size, bytesRead },
          'project state file truncated'
        )
      }
      const array =
        __guard__(result != null ? result.toString() : undefined, (x) =>
          x.split('\n')
        ) || []
      const adjustedLength = Math.max(array.length, 1)
      const resourceList = array.slice(0, adjustedLength - 1)
      const oldState = array[adjustedLength - 1]
      const newState = `stateHash:${state}`
      logger.log(
        { state, oldState, basePath, stateMatches: newState === oldState },
        'checking sync state'
      )
      if (newState !== oldState) {
        return callback(
          new Errors.FilesOutOfSyncError('invalid state for incremental update')
        )
      } else {
        const resources = Array.from(resourceList).map((path) => ({ path }))
        return callback(null, resources)
      }
    })
  },

  checkResourceFiles(resources, allFiles, basePath, callback) {
    // check the paths are all relative to current directory
    let file
    if (callback == null) {
      callback = function (error) {}
    }
    for (file of Array.from(resources || [])) {
      for (const dir of Array.from(
        __guard__(file != null ? file.path : undefined, (x) => x.split('/'))
      )) {
        if (dir === '..') {
          return callback(new Error('relative path in resource file list'))
        }
      }
    }
    // check if any of the input files are not present in list of files
    const seenFile = {}
    for (file of Array.from(allFiles)) {
      seenFile[file] = true
    }
    const missingFiles = Array.from(resources)
      .filter((resource) => !seenFile[resource.path])
      .map((resource) => resource.path)
    if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
      logger.err(
        { missingFiles, basePath, allFiles, resources },
        'missing input files for project'
      )
      return callback(
        new Errors.FilesOutOfSyncError(
          'resource files missing in incremental update'
        )
      )
    } else {
      return callback()
    }
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
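Based on saveProjectState above, the .project-sync-state file is a plain newline-joined list of resource paths followed by the state hash on the last line. For a project with two resources and state "abc123" (illustrative values) it would contain:

// main.tex
// figures/plot.eps
// stateHash:abc123

checkProjectStateMatches re-reads this file, treats the last line as the stored state, and returns the remaining lines as the resource list for the incremental compile.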
356
app/js/ResourceWriter.js
Normal file
@@ -0,0 +1,356 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    no-useless-escape,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')

const parallelFileDownloads = settings.parallelFileDownloads || 1

module.exports = ResourceWriter = {
  syncResourcesToDisk(request, basePath, callback) {
    if (callback == null) {
      callback = function (error, resourceList) {}
    }
    if (request.syncType === 'incremental') {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'incremental sync'
      )
      return ResourceStateManager.checkProjectStateMatches(
        request.syncState,
        basePath,
        function (error, resourceList) {
          if (error != null) {
            return callback(error)
          }
          return ResourceWriter._removeExtraneousFiles(
            resourceList,
            basePath,
            function (error, outputFiles, allFiles) {
              if (error != null) {
                return callback(error)
              }
              return ResourceStateManager.checkResourceFiles(
                resourceList,
                allFiles,
                basePath,
                function (error) {
                  if (error != null) {
                    return callback(error)
                  }
                  return ResourceWriter.saveIncrementalResourcesToDisk(
                    request.project_id,
                    request.resources,
                    basePath,
                    function (error) {
                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null, resourceList)
                    }
                  )
                }
              )
            }
          )
        }
      )
    } else {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'full sync'
      )
      return this.saveAllResourcesToDisk(
        request.project_id,
        request.resources,
        basePath,
        function (error) {
          if (error != null) {
            return callback(error)
          }
          return ResourceStateManager.saveProjectState(
            request.syncState,
            request.resources,
            basePath,
            function (error) {
              if (error != null) {
                return callback(error)
              }
              return callback(null, request.resources)
            }
          )
        }
      )
    }
  },

  saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return this._createDirectory(basePath, (error) => {
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(resources).map((resource) =>
        ((resource) => {
          return (callback) =>
            this._writeResourceToDisk(project_id, resource, basePath, callback)
        })(resource)
      )
      return async.parallelLimit(jobs, parallelFileDownloads, callback)
    })
  },

  saveAllResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return this._createDirectory(basePath, (error) => {
      if (error != null) {
        return callback(error)
      }
      return this._removeExtraneousFiles(resources, basePath, (error) => {
        if (error != null) {
          return callback(error)
        }
        const jobs = Array.from(resources).map((resource) =>
          ((resource) => {
            return (callback) =>
              this._writeResourceToDisk(project_id, resource, basePath, callback)
          })(resource)
        )
        return async.parallelLimit(jobs, parallelFileDownloads, callback)
      })
    })
  },

  _createDirectory(basePath, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return fs.mkdir(basePath, function (err) {
      if (err != null) {
        if (err.code === 'EEXIST') {
          return callback()
        } else {
          logger.log({ err, dir: basePath }, 'error creating directory')
          return callback(err)
        }
      } else {
        return callback()
      }
    })
  },

  _removeExtraneousFiles(resources, basePath, _callback) {
    if (_callback == null) {
      _callback = function (error, outputFiles, allFiles) {}
    }
    const timer = new Metrics.Timer('unlink-output-files')
    const callback = function (error, ...result) {
      timer.done()
      return _callback(error, ...Array.from(result))
    }

    return OutputFileFinder.findOutputFiles(resources, basePath, function (error, outputFiles, allFiles) {
      if (error != null) {
        return callback(error)
      }

      const jobs = []
      for (const file of Array.from(outputFiles || [])) {
        ;(function (file) {
          const { path } = file
          let should_delete = true
          if (
            path.match(/^output\./) ||
            path.match(/\.aux$/) ||
            path.match(/^cache\//)
          ) {
            // knitr cache
            should_delete = false
          }
          if (path.match(/^output-.*/)) {
            // Tikz cached figures (default case)
            should_delete = false
          }
          if (path.match(/\.(pdf|dpth|md5)$/)) {
            // Tikz cached figures (by extension)
            should_delete = false
          }
          if (
            path.match(/\.(pygtex|pygstyle)$/) ||
            path.match(/(^|\/)_minted-[^\/]+\//)
          ) {
            // minted files/directory
            should_delete = false
          }
          if (
            path.match(/\.md\.tex$/) ||
            path.match(/(^|\/)_markdown_[^\/]+\//)
          ) {
            // markdown files/directory
            should_delete = false
          }
          if (path.match(/-eps-converted-to\.pdf$/)) {
            // Epstopdf generated files
            should_delete = false
          }
          if (
            path === 'output.pdf' ||
            path === 'output.dvi' ||
            path === 'output.log' ||
            path === 'output.xdv' ||
            path === 'output.stdout' ||
            path === 'output.stderr'
          ) {
            should_delete = true
          }
          if (path === 'output.tex') {
            // created by TikzManager if present in output files
            should_delete = true
          }
          if (should_delete) {
            return jobs.push((callback) =>
              ResourceWriter._deleteFileIfNotDirectory(
                Path.join(basePath, path),
                callback
              )
            )
          }
        })(file)
      }

      return async.series(jobs, function (error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null, outputFiles, allFiles)
      })
    })
  },

  _deleteFileIfNotDirectory(path, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return fs.stat(path, function (error, stat) {
      if (error != null && error.code === 'ENOENT') {
        return callback()
      } else if (error != null) {
        logger.err(
          { err: error, path },
          'error stating file in deleteFileIfNotDirectory'
        )
        return callback(error)
      } else if (stat.isFile()) {
        return fs.unlink(path, function (error) {
          if (error != null) {
            logger.err(
              { err: error, path },
              'error removing file in deleteFileIfNotDirectory'
            )
            return callback(error)
          } else {
            return callback()
          }
        })
      } else {
        return callback()
      }
    })
  },

  _writeResourceToDisk(project_id, resource, basePath, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return ResourceWriter.checkPath(basePath, resource.path, function (error, path) {
      if (error != null) {
        return callback(error)
      }
      return fs.mkdir(Path.dirname(path), { recursive: true }, function (error) {
        if (error != null) {
          return callback(error)
        }
        // TODO: Don't overwrite file if it hasn't been modified
        if (resource.url != null) {
          return UrlCache.downloadUrlToFile(
            project_id,
            resource.url,
            path,
            resource.modified,
            function (err) {
              if (err != null) {
                logger.err(
                  {
                    err,
                    project_id,
                    path,
                    resource_url: resource.url,
                    modified: resource.modified
                  },
                  'error downloading file for resources'
                )
                Metrics.inc('download-failed')
              }
              return callback()
            }
          ) // try and continue compiling even if http resource can not be downloaded at this time
        } else {
          return fs.writeFile(path, resource.content, callback)
        }
      })
    })
  },

  checkPath(basePath, resourcePath, callback) {
    const path = Path.normalize(Path.join(basePath, resourcePath))
    if (path.slice(0, basePath.length + 1) !== basePath + '/') {
      return callback(new Error('resource path is outside root directory'))
    } else {
      return callback(null, path)
    }
  }
}
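A quick sketch of the traversal guard in checkPath above: the joined and normalized path must stay inside basePath, otherwise an error is returned. The '/compile/abc123' directory is illustrative.

ResourceWriter.checkPath('/compile/abc123', 'chapters/one.tex', function (error, path) {
  // path === '/compile/abc123/chapters/one.tex'
})
ResourceWriter.checkPath('/compile/abc123', '../other/main.tex', function (error, path) {
  // error is Error('resource path is outside root directory'), because
  // normalization resolves the '..' to a path outside the base directory
})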
60
app/js/SafeReader.js
Normal file
@@ -0,0 +1,60 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = SafeReader = {
  // safely read up to size bytes from a file and return result as a
  // string

  readFile(file, size, encoding, callback) {
    if (callback == null) {
      callback = function (error, result) {}
    }
    return fs.open(file, 'r', function (err, fd) {
      if (err != null && err.code === 'ENOENT') {
        return callback()
      }
      if (err != null) {
        return callback(err)
      }

      // safely return always closing the file
      const callbackWithClose = (err, ...result) =>
        fs.close(fd, function (err1) {
          if (err != null) {
            return callback(err)
          }
          if (err1 != null) {
            return callback(err1)
          }
          return callback(null, ...Array.from(result))
        })
      const buff = Buffer.alloc(size) // fills with zeroes by default
      return fs.read(fd, buff, 0, buff.length, 0, function (err, bytesRead, buffer) {
        if (err != null) {
          return callbackWithClose(err)
        }
        const result = buffer.toString(encoding, 0, bytesRead)
        return callbackWithClose(null, result, bytesRead)
      })
    })
  }
}
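A hedged usage sketch: readFile caps the read at `size` bytes, so callers compare bytesRead against the cap to detect truncation, as ResourceStateManager does with SYNC_STATE_MAX_SIZE. The file path below is illustrative.

SafeReader.readFile(
  '/compile/abc123/.project-sync-state',
  128 * 1024,
  'utf8',
  function (error, result, bytesRead) {
    // result is undefined when the file does not exist (ENOENT is swallowed);
    // bytesRead === 128 * 1024 suggests the file was truncated by the cap
  }
)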
94
app/js/StaticServerForbidSymlinks.js
Normal file
@@ -0,0 +1,94 @@
/* eslint-disable
    camelcase,
    no-cond-assign,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS103: Rewrite code to no longer use __guard__
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')

module.exports = ForbidSymlinks = function (staticFn, root, options) {
  const expressStatic = staticFn(root, options)
  const basePath = Path.resolve(root)
  return function (req, res, next) {
    let file, project_id, result
    const path = __guard__(url.parse(req.url), (x) => x.pathname)
    // check that the path is of the form /project_id_or_name/path/to/file.log
    if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
      project_id = result[1]
      file = result[2]
    } else {
      logger.warn({ path }, 'unrecognized file request')
      return res.sendStatus(404)
    }
    // check that the file does not use a relative path
    for (const dir of Array.from(file.split('/'))) {
      if (dir === '..') {
        logger.warn({ path }, 'attempt to use a relative path')
        return res.sendStatus(404)
      }
    }
    // check that the requested path is normalized
    const requestedFsPath = `${basePath}/${project_id}/${file}`
    if (requestedFsPath !== Path.normalize(requestedFsPath)) {
      logger.error(
        { path: requestedFsPath },
        'requestedFsPath is not normalized'
      )
      return res.sendStatus(404)
    }
    // check that the requested path is not a symlink
    return fs.realpath(requestedFsPath, function (err, realFsPath) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return res.sendStatus(404)
        } else {
          logger.error(
            {
              err,
              requestedFsPath,
              realFsPath,
              path: req.params[0],
              project_id: req.params.project_id
            },
            'error checking file access'
          )
          return res.sendStatus(500)
        }
      } else if (requestedFsPath !== realFsPath) {
        logger.warn(
          {
            requestedFsPath,
            realFsPath,
            path: req.params[0],
            project_id: req.params.project_id
          },
          'trying to access a different file (symlink), aborting'
        )
        return res.sendStatus(404)
      } else {
        return expressStatic(req, res, next)
      }
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
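A typical wiring sketch, assuming an express app; the mount point and compile root are illustrative. The factory wraps express.static so the symlink and traversal checks above run before any file is served:

const express = require('express')
const ForbidSymlinks = require('./StaticServerForbidSymlinks')

const app = express()
// serve compile output, refusing symlinks, '..' segments and
// non-normalized paths with a 404 as implemented above
app.use('/output', ForbidSymlinks(express.static, '/compile', { maxAge: 0 }))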
97
app/js/TikzManager.js
Normal file
@@ -0,0 +1,97 @@
/* eslint-disable
    handle-callback-err,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')

// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.

module.exports = TikzManager = {
  checkMainFile(compileDir, mainFile, resources, callback) {
    // if there's already an output.tex file, we don't want to touch it
    if (callback == null) {
      callback = function (error, needsMainFile) {}
    }
    for (const resource of Array.from(resources)) {
      if (resource.path === 'output.tex') {
        logger.log({ compileDir, mainFile }, 'output.tex already in resources')
        return callback(null, false)
      }
    }
    // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
    return ResourceWriter.checkPath(compileDir, mainFile, function (error, path) {
      if (error != null) {
        return callback(error)
      }
      return SafeReader.readFile(path, 65536, 'utf8', function (error, content) {
        if (error != null) {
          return callback(error)
        }
        const usesTikzExternalize =
          (content != null
            ? content.indexOf('\\tikzexternalize')
            : undefined) >= 0
        const usesPsTool =
          (content != null ? content.indexOf('{pstool}') : undefined) >= 0
        logger.log(
          { compileDir, mainFile, usesTikzExternalize, usesPsTool },
          'checked for packages needing main file as output.tex'
        )
        const needsMainFile = usesTikzExternalize || usesPsTool
        return callback(null, needsMainFile)
      })
    })
  },

  injectOutputFile(compileDir, mainFile, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return ResourceWriter.checkPath(compileDir, mainFile, function (error, path) {
      if (error != null) {
        return callback(error)
      }
      return fs.readFile(path, 'utf8', function (error, content) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { compileDir, mainFile },
          'copied file to output.tex as project uses packages which require it'
        )
        // use wx flag to ensure that output file does not already exist
        return fs.writeFile(
          Path.join(compileDir, 'output.tex'),
          content,
          { flag: 'wx' },
          callback
        )
      })
    })
  }
}
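A sketch of the intended call sequence, using the two methods defined above; the compile directory, main file, and resources list are illustrative, and the compile step itself presumably happens elsewhere in the service:

const resources = [{ path: 'main.tex' }]
TikzManager.checkMainFile('/compile/abc123', 'main.tex', resources, function (error, needsMainFile) {
  if (needsMainFile) {
    // copy main.tex to output.tex so -jobname=output matches \tikzexternalize
    TikzManager.injectOutputFile('/compile/abc123', 'main.tex', function (err) {})
  }
})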
273
app/js/UrlCache.js
Normal file
@@ -0,0 +1,273 @@
/* eslint-disable
    camelcase,
    handle-callback-err,
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')

module.exports = UrlCache = {
  downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return UrlCache._ensureUrlIsInCache(
      project_id,
      url,
      lastModified,
      (error, pathToCachedUrl) => {
        if (error != null) {
          return callback(error)
        }
        return UrlCache._copyFile(pathToCachedUrl, destPath, function (error) {
          if (error != null) {
            return UrlCache._clearUrlDetails(project_id, url, () =>
              callback(error)
            )
          } else {
            return callback(error)
          }
        })
      }
    )
  },

  clearProject(project_id, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return UrlCache._findAllUrlsInProject(project_id, function (error, urls) {
      logger.log(
        { project_id, url_count: urls.length },
        'clearing project URLs'
      )
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(urls || []).map((url) =>
        ((url) => (callback) =>
          UrlCache._clearUrlFromCache(project_id, url, function (error) {
            if (error != null) {
              logger.error(
                { err: error, project_id, url },
                'error clearing project URL'
              )
            }
            return callback()
          }))(url)
      )
      return async.series(jobs, callback)
    })
  },

  _ensureUrlIsInCache(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function (error, pathOnDisk) {}
    }
    if (lastModified != null) {
      // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      // So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    }
    return UrlCache._doesUrlNeedDownloading(
      project_id,
      url,
      lastModified,
      (error, needsDownloading) => {
        if (error != null) {
          return callback(error)
        }
        if (needsDownloading) {
          logger.log({ url, lastModified }, 'downloading URL')
          return UrlFetcher.pipeUrlToFileWithRetry(
            url,
            UrlCache._cacheFilePathForUrl(project_id, url),
            (error) => {
              if (error != null) {
                return callback(error)
              }
              return UrlCache._updateOrCreateUrlDetails(
                project_id,
                url,
                lastModified,
                (error) => {
                  if (error != null) {
                    return callback(error)
                  }
                  return callback(
                    null,
                    UrlCache._cacheFilePathForUrl(project_id, url)
                  )
                }
              )
            }
          )
        } else {
          logger.log({ url, lastModified }, 'URL is up to date in cache')
          return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
        }
      }
    )
  },

  _doesUrlNeedDownloading(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function (error, needsDownloading) {}
    }
    if (lastModified == null) {
      return callback(null, true)
    }
    return UrlCache._findUrlDetails(project_id, url, function (
      error,
      urlDetails
    ) {
      if (error != null) {
        return callback(error)
      }
      if (
        urlDetails == null ||
        urlDetails.lastModified == null ||
        urlDetails.lastModified.getTime() < lastModified.getTime()
      ) {
        return callback(null, true)
      } else {
        return callback(null, false)
      }
    })
  },

  _cacheFileNameForUrl(project_id, url) {
    return project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
  },

  _cacheFilePathForUrl(project_id, url) {
    return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
      project_id,
      url
    )}`
  },

  _copyFile(from, to, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const callbackOnce = function (error) {
      if (error != null) {
        logger.error({ err: error, from, to }, 'error copying file from cache')
      }
      _callback(error)
      return (_callback = function () {})
    }
    const writeStream = fs.createWriteStream(to)
    const readStream = fs.createReadStream(from)
    writeStream.on('error', callbackOnce)
    readStream.on('error', callbackOnce)
    writeStream.on('close', callbackOnce)
    return writeStream.on('open', () => readStream.pipe(writeStream))
  },

  _clearUrlFromCache(project_id, url, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return UrlCache._clearUrlDetails(project_id, url, function (error) {
      if (error != null) {
        return callback(error)
      }
      return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
        error
      ) {
        if (error != null) {
          return callback(error)
        }
        return callback(null)
      })
    })
  },

  _deleteUrlCacheFromDisk(project_id, url, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function (
      error
    ) {
      if (error != null && error.code !== 'ENOENT') {
        // no error if the file isn't present
        return callback(error)
      } else {
        return callback()
      }
    })
  },

  _findUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function (error, urlDetails) {}
    }
    const job = (cb) =>
      db.UrlCache.findOne({ where: { url, project_id } })
        .then((urlDetails) => cb(null, urlDetails))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const job = (cb) =>
      db.UrlCache.findOrCreate({ where: { url, project_id } })
        .spread((urlDetails, created) =>
          urlDetails
            .update({ lastModified })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _clearUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function (error) {}
    }
    const job = (cb) =>
      db.UrlCache.destroy({ where: { url, project_id } })
        .then(() => cb(null))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findAllUrlsInProject(project_id, callback) {
    if (callback == null) {
      callback = function (error, urls) {}
    }
    const job = (cb) =>
      db.UrlCache.findAll({ where: { project_id } })
        .then((urlEntries) =>
          cb(
            null,
            urlEntries.map((entry) => entry.url)
          )
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  }
}
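
From a caller's point of view, UrlCache.downloadUrlToFile hides the whole cache dance: check the UrlCache table, re-download through UrlFetcher only when lastModified has moved forward, then copy the cached file (keyed as the project id plus the md5 of the URL, per _cacheFileNameForUrl) into the compile directory. A minimal sketch with hypothetical ids and paths:

const UrlCache = require('./app/js/UrlCache')

UrlCache.downloadUrlToFile(
  'project-123', // hypothetical project id
  'http://filestore/project/project-123/file/abc', // hypothetical source URL
  '/compile/project-123/figure.png', // where the compile expects the file
  new Date('2020-09-01T12:00:00Z'), // lastModified reported by the caller
  (error) => {
    if (error) throw error
    // the file is now in the compile directory, served from cache when fresh
  }
)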
128 app/js/UrlFetcher.js Normal file
@@ -0,0 +1,128 @@
/* eslint-disable
    handle-callback-err,
    no-return-assign,
    no-unused-vars,
    node/no-deprecated-api,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const async = require('async')

const oneMinute = 60 * 1000

module.exports = UrlFetcher = {
  pipeUrlToFileWithRetry(url, filePath, callback) {
    const doDownload = function (cb) {
      UrlFetcher.pipeUrlToFile(url, filePath, cb)
    }
    async.retry(3, doDownload, callback)
  },

  pipeUrlToFile(url, filePath, _callback) {
    if (_callback == null) {
      _callback = function (error) {}
    }
    const callbackOnce = function (error) {
      if (timeoutHandler != null) {
        clearTimeout(timeoutHandler)
      }
      _callback(error)
      return (_callback = function () {})
    }

    if (settings.filestoreDomainOveride != null) {
      const p = URL.parse(url).path
      url = `${settings.filestoreDomainOveride}${p}`
    }
    var timeoutHandler = setTimeout(
      function () {
        timeoutHandler = null
        logger.error({ url, filePath }, 'Timed out downloading file to cache')
        return callbackOnce(
          new Error(`Timed out downloading file to cache ${url}`)
        )
      },
      // FIXME: maybe need to close fileStream here
      3 * oneMinute
    )

    logger.log({ url, filePath }, 'started downloading url to cache')
    const urlStream = request.get({ url, timeout: oneMinute })
    urlStream.pause() // stop data flowing until we are ready

    // attach handlers before setting up pipes
    urlStream.on('error', function (error) {
      logger.error({ err: error, url, filePath }, 'error downloading url')
      return callbackOnce(
        error || new Error(`Something went wrong downloading the URL ${url}`)
      )
    })

    urlStream.on('end', () =>
      logger.log({ url, filePath }, 'finished downloading file into cache')
    )

    return urlStream.on('response', function (res) {
      if (res.statusCode >= 200 && res.statusCode < 300) {
        const fileStream = fs.createWriteStream(filePath)

        // attach handlers before setting up pipes
        fileStream.on('error', function (error) {
          logger.error(
            { err: error, url, filePath },
            'error writing file into cache'
          )
          return fs.unlink(filePath, function (err) {
            if (err != null) {
              logger.err({ err, filePath }, 'error deleting file from cache')
            }
            return callbackOnce(error)
          })
        })

        fileStream.on('finish', function () {
          logger.log({ url, filePath }, 'finished writing file into cache')
          return callbackOnce()
        })

        fileStream.on('pipe', () =>
          logger.log({ url, filePath }, 'piping into filestream')
        )

        urlStream.pipe(fileStream)
        return urlStream.resume() // now we are ready to handle the data
      } else {
        logger.error(
          { statusCode: res.statusCode, url, filePath },
          'unexpected status code downloading url to cache'
        )
        // https://nodejs.org/api/http.html#http_class_http_clientrequest
        // If you add a 'response' event handler, then you must consume
        // the data from the response object, either by calling
        // response.read() whenever there is a 'readable' event, or by
        // adding a 'data' handler, or by calling the .resume()
        // method. Until the data is consumed, the 'end' event will not
        // fire. Also, until the data is read it will consume memory
        // that can eventually lead to a 'process out of memory' error.
        urlStream.resume() // discard the data
        return callbackOnce(
          new Error(
            `URL returned non-success status code: ${res.statusCode} ${url}`
          )
        )
      }
    })
  }
}
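
Note the split between the two functions above: pipeUrlToFile performs one guarded download (timeout, status-code check, cleanup of a partially written cache file), while pipeUrlToFileWithRetry wraps it in async.retry(3, ...), so a transient failure costs up to three attempts before the callback sees an error. A minimal sketch with hypothetical arguments:

const UrlFetcher = require('./app/js/UrlFetcher')

UrlFetcher.pipeUrlToFileWithRetry(
  'http://filestore/project/project-123/file/abc', // hypothetical URL
  '/cache/project-123:0123456789abcdef', // hypothetical cache path
  (error) => {
    if (error) {
      console.error('download failed after 3 attempts', error)
    }
  }
)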
67 app/js/db.js Normal file
@@ -0,0 +1,67 @@
/* eslint-disable
    no-console,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('lodash')
const logger = require('logger-sharelatex')

const options = _.extend({ logging: false }, Settings.mysql.clsi)

logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')

const sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  options
)

if (Settings.mysql.clsi.dialect === 'sqlite') {
  logger.log('running PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA synchronous=OFF;')
  sequelize.query('PRAGMA read_uncommitted = true;')
}

module.exports = {
  UrlCache: sequelize.define(
    'UrlCache',
    {
      url: Sequelize.STRING,
      project_id: Sequelize.STRING,
      lastModified: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
    }
  ),

  Project: sequelize.define(
    'Project',
    {
      project_id: { type: Sequelize.STRING, primaryKey: true },
      lastAccessed: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['lastAccessed'] }]
    }
  ),

  op: Sequelize.Op,

  sync() {
    logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
    return sequelize
      .sync()
      .then(() => logger.log('db sync complete'))
      .catch((err) => console.log(err, 'error syncing'))
  }
}
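
db.js defines the two Sequelize models but never creates tables itself, so a boot sequence has to call sync() before the first query. A minimal sketch; listen() here is a hypothetical stand-in for however the app actually starts serving:

const db = require('./app/js/db')

db.sync().then(() => {
  // the UrlCache and Project tables and their indexes now exist
  listen() // hypothetical: start the HTTP server once the schema is ready
})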
4 bin/acceptance_test Normal file
@@ -0,0 +1,4 @@
#!/bin/bash
set -e;
MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000"
$MOCHA "$@"
BIN bin/synctex Executable file
Binary file not shown.
9 buildscript.txt Normal file
@@ -0,0 +1,9 @@
clsi
--data-dirs=cache,compiles,db
--dependencies=
--docker-repos=gcr.io/overleaf-ops
--env-add=
--env-pass-through=TEXLIVE_IMAGE
--node-version=10.22.1
--public-repo=True
--script-version=3.3.3
@@ -1,39 +0,0 @@
Path = require "path"

module.exports =
  # Options are passed to Sequelize.
  # See http://sequelizejs.com/documentation#usage-options for details
  mysql:
    clsi:
      database: "clsi"
      username: "clsi"
      password: null
      dialect: "sqlite"
      storage: Path.resolve(__dirname + "/../db.sqlite")

  path:
    compilesDir: Path.resolve(__dirname + "/../compiles")
    clsiCacheDir: Path.resolve(__dirname + "/../cache")
    synctexBaseDir: (project_id) -> Path.join(@compilesDir, project_id)

  # clsi:
  #   commandRunner: "docker-runner-sharelatex"
  #   docker:
  #     image: "quay.io/sharelatex/texlive-full"
  #     env:
  #       PATH: "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/2013/bin/x86_64-linux/"
  #       HOME: "/tmp"
  #     modem:
  #       socketPath: false
  #     user: "tex"

  internal:
    clsi:
      port: 3013
      host: "localhost"

  apis:
    clsi:
      url: "http://localhost:3013"

  smokeTest: false
148 config/settings.defaults.js Normal file
@@ -0,0 +1,148 @@
const Path = require('path')

module.exports = {
  // Options are passed to Sequelize.
  // See http://sequelizejs.com/documentation#usage-options for details
  mysql: {
    clsi: {
      database: 'clsi',
      username: 'clsi',
      dialect: 'sqlite',
      storage:
        process.env.SQLITE_PATH || Path.resolve(__dirname, '../db/db.sqlite'),
      pool: {
        max: 1,
        min: 1
      },
      retry: {
        max: 10
      }
    }
  },

  compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',

  processLifespanLimitMs:
    parseInt(process.env.PROCESS_LIFE_SPAN_LIMIT_MS) || 60 * 60 * 24 * 1000 * 2,

  path: {
    compilesDir: Path.resolve(__dirname, '../compiles'),
    clsiCacheDir: Path.resolve(__dirname, '../cache'),
    synctexBaseDir(projectId) {
      return Path.join(this.compilesDir, projectId)
    }
  },

  internal: {
    clsi: {
      port: 3013,
      host: process.env.LISTEN_ADDRESS || 'localhost'
    },

    load_balancer_agent: {
      report_load: true,
      load_port: 3048,
      local_port: 3049
    }
  },
  apis: {
    clsi: {
      url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
    }
  },

  smokeTest: process.env.SMOKE_TEST || false,
  project_cache_length_ms: 1000 * 60 * 60 * 24,
  parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1,
  parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
  filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
  texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
  texliveOpenoutAny: process.env.TEXLIVE_OPENOUT_ANY,
  sentry: {
    dsn: process.env.SENTRY_DSN
  }
}

if (process.env.ALLOWED_COMPILE_GROUPS) {
  try {
    module.exports.allowedCompileGroups = process.env.ALLOWED_COMPILE_GROUPS.split(
      ' '
    )
  } catch (error) {
    console.error(error, 'could not apply allowed compile group setting')
    process.exit(1)
  }
}

if (process.env.DOCKER_RUNNER) {
  let seccompProfilePath
  module.exports.clsi = {
    dockerRunner: process.env.DOCKER_RUNNER === 'true',
    docker: {
      runtime: process.env.DOCKER_RUNTIME,
      image:
        process.env.TEXLIVE_IMAGE || 'quay.io/sharelatex/texlive-full:2017.1',
      env: {
        HOME: '/tmp'
      },
      socketPath: '/var/run/docker.sock',
      user: process.env.TEXLIVE_IMAGE_USER || 'tex'
    },
    optimiseInDocker: true,
    expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
    checkProjectsIntervalMs: 10 * 60 * 1000
  }

  try {
    // Override individual docker settings using path-based keys, e.g.:
    // compileGroupDockerConfigs = {
    //    priority: { 'HostConfig.CpuShares': 100 }
    //    beta: { 'dotted.path.here', 'value'}
    // }
    const compileGroupConfig = JSON.parse(
      process.env.COMPILE_GROUP_DOCKER_CONFIGS || '{}'
    )
    // Automatically clean up wordcount and synctex containers
    const defaultCompileGroupConfig = {
      wordcount: { 'HostConfig.AutoRemove': true },
      synctex: { 'HostConfig.AutoRemove': true }
    }
    module.exports.clsi.docker.compileGroupConfig = Object.assign(
      defaultCompileGroupConfig,
      compileGroupConfig
    )
  } catch (error) {
    console.error(error, 'could not apply compile group docker configs')
    process.exit(1)
  }

  try {
    seccompProfilePath = Path.resolve(__dirname, '../seccomp/clsi-profile.json')
    module.exports.clsi.docker.seccomp_profile = JSON.stringify(
      JSON.parse(require('fs').readFileSync(seccompProfilePath))
    )
  } catch (error) {
    console.error(
      error,
      `could not load seccomp profile from ${seccompProfilePath}`
    )
    process.exit(1)
  }

  if (process.env.ALLOWED_IMAGES) {
    try {
      module.exports.clsi.docker.allowedImages = process.env.ALLOWED_IMAGES.split(
        ' '
      )
    } catch (error) {
      console.error(error, 'could not apply allowed images setting')
      process.exit(1)
    }
  }

  module.exports.path.synctexBaseDir = () => '/compile'

  module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR

  module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH
}
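
The COMPILE_GROUP_DOCKER_CONFIGS block above merges deployment-specific, per-group Docker options over the AutoRemove defaults. A minimal sketch of the environment a deployment might set before loading the settings; the 'priority' group name and CpuShares value are hypothetical, and the sketch assumes DOCKER_RUNNER is enabled and the seccomp profile file is present, since the settings file exits otherwise:

process.env.COMPILE_GROUP_DOCKER_CONFIGS = JSON.stringify({
  priority: { 'HostConfig.CpuShares': 100 } // hypothetical compile group
})
const settings = require('./config/settings.defaults')
// wordcount and synctex keep their AutoRemove defaults; 'priority' is added
console.log(settings.clsi.docker.compileGroupConfig)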
2 db/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
5 debug Executable file
@@ -0,0 +1,5 @@
#!/bin/bash
echo "hello world"
sleep 3
echo "awake"
/opt/synctex pdf /compile/output.pdf 1 100 200
34 docker-compose-config.yml Normal file
@@ -0,0 +1,34 @@
version: "2.3"

services:
  dev:
    environment:
      ALLOWED_IMAGES: "quay.io/sharelatex/texlive-full:2017.1"
      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
      TEXLIVE_IMAGE_USER: "tex"
      SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
      DOCKER_RUNNER: "true"
      COMPILES_HOST_DIR: $PWD/compiles
      SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./compiles:/app/compiles
      - ./cache:/app/cache
      - ./bin/synctex:/app/bin/synctex

  ci:
    environment:
      ALLOWED_IMAGES: ${TEXLIVE_IMAGE}
      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
      TEXLIVE_IMAGE_USER: "tex"
      SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
      DOCKER_RUNNER: "true"
      COMPILES_HOST_DIR: $PWD/compiles
      SYNCTEX_BIN_HOST_PATH: $PWD/bin/synctex
      SQLITE_PATH: /app/compiles/db.sqlite
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:rw
      - ./compiles:/app/compiles
      - ./cache:/app/cache
      - ./bin/synctex:/app/bin/synctex
40 docker-compose.ci.yml Normal file
@@ -0,0 +1,40 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"

  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    extends:
      file: docker-compose-config.yml
      service: ci
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
      TEXLIVE_IMAGE:
    command: npm run test:acceptance:_run

  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root
41 docker-compose.yml Normal file
@@ -0,0 +1,41 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

version: "2.3"

services:
  test_unit:
    build:
      context: .
      target: base
    volumes:
      - .:/app
    working_dir: /app
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:unit

  test_acceptance:
    build:
      context: .
      target: base
    volumes:
      - .:/app
    working_dir: /app
    extends:
      file: docker-compose-config.yml
      service: dev
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      REDIS_HOST: redis
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ERROR
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:acceptance
18 entrypoint.sh Executable file
@@ -0,0 +1,18 @@
#!/bin/sh

docker --version >&2

# add the node user to the docker group on the host
DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
groupadd --non-unique --gid ${DOCKER_GROUP} dockeronhost
usermod -aG dockeronhost node

# compatibility: initial volume setup
chown node:node /app/cache
chown node:node /app/compiles
chown node:node /app/db

# make synctex available for remount in compiles
cp /app/bin/synctex /app/bin/synctex-mount/synctex

exec runuser -u node -- "$@"
4 install_deps.sh Executable file
@@ -0,0 +1,4 @@
#!/bin/sh
wget -qO- https://get.docker.com/ | sh
apt-get install poppler-utils vim ghostscript --yes
npm rebuild
41 kube.yaml Normal file
@@ -0,0 +1,41 @@
apiVersion: v1
kind: Service
metadata:
  name: clsi
  namespace: default
spec:
  type: LoadBalancer
  ports:
    - port: 80
      protocol: TCP
      targetPort: 80
  selector:
    run: clsi
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: clsi
  namespace: default
spec:
  replicas: 2
  template:
    metadata:
      labels:
        run: clsi
    spec:
      containers:
        - name: clsi
          image: gcr.io/henry-terraform-admin/clsi
          imagePullPolicy: Always
          readinessProbe:
            httpGet:
              path: status
              port: 80
            periodSeconds: 5
            initialDelaySeconds: 0
            failureThreshold: 3
            successThreshold: 1
17 nodemon.json Normal file
@@ -0,0 +1,17 @@
{
  "ignore": [
    ".git",
    "node_modules/"
  ],
  "verbose": true,
  "legacyWatch": true,
  "execMap": {
    "js": "npm run start"
  },
  "watch": [
    "app/js/",
    "app.js",
    "config/"
  ],
  "ext": "js"
}
7382 package-lock.json generated Normal file
File diff suppressed because it is too large
85 package.json
@@ -1,42 +1,69 @@
 {
   "name": "node-clsi",
   "description": "A Node.js implementation of the CLSI LaTeX web-API",
-  "version": "0.1.0",
+  "version": "0.1.4",
   "repository": {
     "type": "git",
     "url": "https://github.com/sharelatex/clsi-sharelatex.git"
   },
+  "scripts": {
+    "start": "node $NODE_APP_OPTIONS app.js",
+    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
+    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
+    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
+    "nodemon": "nodemon --config nodemon.json",
+    "lint": "node_modules/.bin/eslint --max-warnings 0 .",
+    "format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
+    "format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
+  },
   "author": "James Allen <james@sharelatex.com>",
   "dependencies": {
-    "async": "0.2.9",
-    "lynx": "0.0.11",
-    "mkdirp": "0.3.5",
-    "mysql": "2.0.0-alpha7",
-    "request": "~2.21.0",
-    "logger-sharelatex": "git+https://github.com/sharelatex/logger-sharelatex.git#v1.0.0",
-    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.0.0",
-    "metrics-sharelatex": "git+https://github.com/sharelatex/metrics-sharelatex.git#v1.0.0",
-    "sequelize": "~2.0.0-beta.2",
-    "wrench": "~1.5.4",
-    "smoke-test-sharelatex": "git+https://github.com/sharelatex/smoke-test-sharelatex.git#v1.0.0",
-    "sqlite3": "~2.2.0",
-    "express": "^4.2.0",
-    "body-parser": "^1.2.0"
+    "async": "3.2.0",
+    "body-parser": "^1.19.0",
+    "diskusage": "^1.1.3",
+    "dockerode": "^3.1.0",
+    "express": "^4.17.1",
+    "fs-extra": "^8.1.0",
+    "heapdump": "^0.3.15",
+    "lockfile": "^1.0.4",
+    "lodash": "^4.17.20",
+    "logger-sharelatex": "^2.2.0",
+    "lynx": "0.2.0",
+    "metrics-sharelatex": "^2.6.0",
+    "mysql": "^2.18.1",
+    "request": "^2.88.2",
+    "sequelize": "^5.21.5",
+    "settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
+    "sqlite3": "^4.1.1",
+    "underscore": "^1.11.0",
+    "v8-profiler-node8": "^6.1.1",
+    "wrench": "~1.5.9"
   },
   "devDependencies": {
-    "mocha": "1.10.0",
-    "coffee-script": "1.6.0",
-    "chai": "~1.8.1",
-    "sinon": "~1.7.3",
-    "grunt": "~0.4.2",
-    "grunt-contrib-coffee": "~0.7.0",
-    "grunt-contrib-clean": "~0.5.0",
-    "grunt-shell": "~0.6.1",
-    "grunt-mocha-test": "~0.8.1",
-    "sandboxed-module": "~0.3.0",
-    "timekeeper": "0.0.4",
-    "grunt-execute": "^0.1.5",
-    "bunyan": "^0.22.1",
-    "grunt-bunyan": "^0.5.0"
+    "babel-eslint": "^10.1.0",
+    "bunyan": "^1.8.12",
+    "chai": "~4.2.0",
+    "eslint": "^6.8.0",
+    "eslint-config-prettier": "^6.10.0",
+    "eslint-config-standard": "^14.1.0",
+    "eslint-config-standard-jsx": "^8.1.0",
+    "eslint-config-standard-react": "^9.2.0",
+    "eslint-plugin-chai-expect": "^2.1.0",
+    "eslint-plugin-chai-friendly": "^0.5.0",
+    "eslint-plugin-import": "^2.20.1",
+    "eslint-plugin-jsx-a11y": "^6.2.3",
+    "eslint-plugin-mocha": "^6.3.0",
+    "eslint-plugin-node": "^11.0.0",
+    "eslint-plugin-prettier": "^3.1.2",
+    "eslint-plugin-promise": "^4.2.1",
+    "eslint-plugin-react": "^7.19.0",
+    "eslint-plugin-standard": "^4.0.1",
+    "mocha": "^7.1.0",
+    "prettier": "^2.0.0",
+    "prettier-eslint-cli": "^5.0.0",
+    "sandboxed-module": "^2.0.3",
+    "sinon": "~9.0.1",
+    "timekeeper": "2.2.0"
   }
 }
3 patch-texlive-dockerfile Normal file
@@ -0,0 +1,3 @@
FROM quay.io/sharelatex/texlive-full:2017.1

# RUN usermod -u 1001 tex
836 seccomp/clsi-profile.json Normal file
@@ -0,0 +1,836 @@
{
  "defaultAction": "SCMP_ACT_ERRNO",
  "architectures": ["SCMP_ARCH_X86_64", "SCMP_ARCH_X86", "SCMP_ARCH_X32"],
  "syscalls": [
    { "name": "access", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "arch_prctl", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "brk", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "chdir", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "chmod", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "clock_getres", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "clock_gettime", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "clock_nanosleep", "action": "SCMP_ACT_ALLOW", "args": [] },
    {
      "name": "clone",
      "action": "SCMP_ACT_ALLOW",
      "args": [
        {
          "index": 0,
          "value": 2080505856,
          "valueTwo": 0,
          "op": "SCMP_CMP_MASKED_EQ"
        }
      ]
    },
    { "name": "close", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "copy_file_range", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "creat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "dup", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "dup2", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "dup3", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "execve", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "execveat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "exit", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "exit_group", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "faccessat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fadvise64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fadvise64_64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fallocate", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fchdir", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fchmod", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fchmodat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fcntl", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fcntl64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fdatasync", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fork", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fstat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fstat64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fstatat64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fstatfs", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fstatfs64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fsync", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "ftruncate", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "ftruncate64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "futex", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "futimesat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getcpu", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getcwd", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getdents", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getdents64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getegid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getegid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "geteuid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "geteuid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getgid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getgid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getgroups", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getgroups32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getpgid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getpgrp", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getpid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getppid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getpriority", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getresgid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getresgid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getresuid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getresuid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getrlimit", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "get_robust_list", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getrusage", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getsid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "gettid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getuid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "getuid32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "ioctl", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "kill", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "_llseek", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "lseek", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "lstat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "lstat64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "madvise", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mkdir", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mkdirat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mmap", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mmap2", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mprotect", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "mremap", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "munmap", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "newfstatat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "open", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "openat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pause", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pipe", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pipe2", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "prctl", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pread64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "preadv", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "prlimit64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pwrite64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pwritev", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "read", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "readlink", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "readlinkat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "readv", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rename", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "renameat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "renameat2", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "restart_syscall", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rmdir", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigaction", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigpending", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigprocmask", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigqueueinfo", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigreturn", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigsuspend", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_sigtimedwait", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "rt_tgsigqueueinfo", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_getaffinity", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_getparam", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_get_priority_max", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_get_priority_min", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_getscheduler", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_rr_get_interval", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sched_yield", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sendfile", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sendfile64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "setgroups", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "setgroups32", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "set_robust_list", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "set_tid_address", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sigaltstack", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "stat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "stat64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "statfs", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "statfs64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sync", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sync_file_range", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "syncfs", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "sysinfo", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "tgkill", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "timer_create", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "timer_delete", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "timer_getoverrun", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "timer_gettime", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "timer_settime", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "times", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "tkill", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "truncate", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "truncate64", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "umask", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "uname", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "unlink", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "unlinkat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "utime", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "utimensat", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "utimes", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "vfork", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "vhangup", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "wait4", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "waitid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "write", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "writev", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "pread", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "setgid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "setuid", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "capget", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "capset", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "fchown", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "gettimeofday", "action": "SCMP_ACT_ALLOW", "args": [] },
    { "name": "epoll_pwait", "action": "SCMP_ACT_ALLOW", "args": [] }
  ]
}
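
settings.defaults.js above loads this profile and stringifies it into clsi.docker.seccomp_profile. A minimal sketch of how a container runner might attach it when creating a compile container via dockerode (which is in the dependencies); the container name and command are hypothetical, and the seccomp= entry in SecurityOpt is the standard Docker API form for passing a profile:

const Docker = require('dockerode')
const Settings = require('settings-sharelatex')

const docker = new Docker({ socketPath: '/var/run/docker.sock' })
docker.createContainer(
  {
    name: 'compile-project-123', // hypothetical container name
    Image: Settings.clsi.docker.image,
    Cmd: ['latexmk', '-pdf', 'output.tex'], // hypothetical compile command
    HostConfig: {
      // seccomp_profile is the JSON string built in settings.defaults.js
      SecurityOpt: [`seccomp=${Settings.clsi.docker.seccomp_profile}`]
    }
  },
  (error, container) => {
    if (error) throw error
    container.start()
  }
)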
34 synctex.profile Normal file
@@ -0,0 +1,34 @@
include /etc/firejail/disable-common.inc
include /etc/firejail/disable-devel.inc
# include /etc/firejail/disable-mgmt.inc ## removed in 0.9.40
# include /etc/firejail/disable-secret.inc ## removed in 0.9.40

read-only /bin
blacklist /boot
blacklist /dev
read-only /etc
blacklist /home # blacklisted for synctex
read-only /lib
read-only /lib64
blacklist /media
blacklist /mnt
blacklist /opt
blacklist /root
read-only /run
blacklist /sbin
blacklist /selinux
blacklist /src
blacklist /sys
read-only /usr

caps.drop all
noroot
nogroups
net none
private-tmp
private-dev
shell none
seccomp
nonewprivs
@@ -1,46 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Broken LaTeX file", ->
  before ->
    @broken_request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{articl % :(
          \\begin{documen % :(
          Broken
          \\end{documen % :(
        '''
      ]
    @correct_request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]

  describe "on first run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @broken_request, (@error, @res, @body) => done()

    it "should return a failure status", ->
      @body.compile.status.should.equal "failure"

  describe "on second run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @correct_request, () =>
        Client.compile @project_id, @broken_request, (@error, @res, @body) =>
          done()

    it "should return a failure status", ->
      @body.compile.status.should.equal "failure"
@@ -1,34 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Deleting Old Files", ->
  before ->
    @request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]

  describe "on first run", ->
    before (done) ->
      @project_id = Client.randomId()
      Client.compile @project_id, @request, (@error, @res, @body) => done()

    it "should return a success status", ->
      @body.compile.status.should.equal "success"

    describe "after file has been deleted", ->
      before (done) ->
        @request.resources = []
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

      it "should return a failure status", ->
        @body.compile.status.should.equal "failure"
@@ -1,83 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
fs = require "fs"
ChildProcess = require "child_process"

fixturePath = (path) -> __dirname + "/../fixtures/" + path

try
  fs.mkdirSync(fixturePath("tmp"))
catch e

convertToPng = (pdfPath, pngPath, callback = (error) ->) ->
  convert = ChildProcess.exec "convert #{fixturePath(pdfPath)} #{fixturePath(pngPath)}"
  convert.on "exit", () ->
    callback()

compare = (originalPath, generatedPath, callback = (error, same) ->) ->
  proc = ChildProcess.exec "compare -metric mae #{fixturePath(originalPath)} #{fixturePath(generatedPath)} #{fixturePath("tmp/diff.png")}"
  stderr = ""
  proc.stderr.on "data", (chunk) -> stderr += chunk
  proc.on "exit", () ->
    if stderr.trim() == "0 (0)"
      callback null, true
    else
      console.log stderr
      callback null, false

compareMultiplePages = (project_id, callback = (error) ->) ->
  compareNext = (page_no, callback) ->
    path = "tmp/#{project_id}-source-#{page_no}.png"
    fs.stat fixturePath(path), (error, stat) ->
      if error?
        callback()
      else
        compare "tmp/#{project_id}-source-#{page_no}.png", "tmp/#{project_id}-generated-#{page_no}.png", (error, same) =>
          throw error if error?
          same.should.equal true
          compareNext page_no + 1, callback
  compareNext 0, callback

downloadAndComparePdf = (project_id, example_dir, url, callback = (error) ->) ->
  writeStream = fs.createWriteStream(fixturePath("tmp/#{project_id}.pdf"))
  request.get(url).pipe(writeStream)
  writeStream.on "close", () =>
    convertToPng "tmp/#{project_id}.pdf", "tmp/#{project_id}-generated.png", (error) =>
      throw error if error?
      convertToPng "examples/#{example_dir}/output.pdf", "tmp/#{project_id}-source.png", (error) =>
        throw error if error?
        fs.stat fixturePath("tmp/#{project_id}-source-0.png"), (error, stat) =>
          if error?
            compare "tmp/#{project_id}-source.png", "tmp/#{project_id}-generated.png", (error, same) =>
              throw error if error?
              same.should.equal true
              callback()
          else
            compareMultiplePages project_id, (error) ->
              throw error if error?
              callback()

Client.runServer(4242, fixturePath("examples"))

describe "Example Documents", ->
  before (done) ->
    ChildProcess.exec("rm test/acceptance/fixtures/tmp/*").on "exit", () -> done()

  for example_dir in fs.readdirSync fixturePath("examples")
    do (example_dir) ->
      describe example_dir, ->
        before ->
          @project_id = Client.randomId()

        it "should generate the correct pdf", (done) ->
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)

        it "should generate the correct pdf on the second run as well", (done) ->
          Client.compileDirectory @project_id, fixturePath("examples"), example_dir, 4242, (error, res, body) =>
            pdf = Client.getOutputFile body, "pdf"
            downloadAndComparePdf(@project_id, example_dir, pdf.url, done)
@@ -1,39 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Simple LaTeX file", ->
  before (done) ->
    @project_id = Client.randomId()
    @request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    Client.compile @project_id, @request, (@error, @res, @body) => done()

  it "should return the PDF", ->
    pdf = Client.getOutputFile(@body, "pdf")
    pdf.type.should.equal "pdf"

  it "should return the log", ->
    log = Client.getOutputFile(@body, "log")
    log.type.should.equal "log"

  it "should provide the pdf for download", (done) ->
    pdf = Client.getOutputFile(@body, "pdf")
    request.get pdf.url, (error, res, body) ->
      res.statusCode.should.equal 200
      done()

  it "should provide the log for download", (done) ->
    log = Client.getOutputFile(@body, "log")
    request.get log.url, (error, res, body) ->
      res.statusCode.should.equal 200
      done()
@@ -1,38 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
expect = require("chai").expect

describe "Syncing", ->
  before (done) ->
    @request =
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    @project_id = Client.randomId()
    Client.compile @project_id, @request, (@error, @res, @body) => done()

  describe "from code to pdf", ->
    it "should return the correct location", (done) ->
      Client.syncFromCode @project_id, "main.tex", 3, 5, (error, pdfPositions) ->
        throw error if error?
        expect(pdfPositions).to.deep.equal(
          pdf: [ { page: 1, h: 133.77, v: 134.76, height: 6.92, width: 343.71 } ]
        )
        done()

  describe "from pdf to code", ->
    it "should return the correct location", (done) ->
      Client.syncFromPdf @project_id, 1, 100, 200, (error, codePositions) ->
        throw error if error?
        expect(codePositions).to.deep.equal(
          code: [ { file: 'main.tex', line: 3, column: -1 } ]
        )
        done()
@@ -1,27 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()

describe "Timed out compile", ->
  before (done) ->
    @request =
      options:
        timeout: 0.01 # seconds
      resources: [
        path: "main.tex"
        content: '''
          \\documentclass{article}
          \\begin{document}
          Hello world
          \\end{document}
        '''
      ]
    @project_id = Client.randomId()
    Client.compile @project_id, @request, (@error, @res, @body) => done()

  it "should return a timeout error", ->
    @body.compile.error.should.equal "container timed out"

  it "should return a timedout status", ->
    @body.compile.status.should.equal "timedout"
@@ -1,220 +0,0 @@
Client = require "./helpers/Client"
request = require "request"
require("chai").should()
sinon = require "sinon"

host = "localhost"

Server =
  run: () ->
    express = require "express"
    app = express()

    staticServer = express.static __dirname + "/../fixtures/"
    app.get "/:random_id/*", (req, res, next) =>
      @getFile(req.url)
      req.url = "/" + req.params[0]
      staticServer(req, res, next)

    app.listen 31415, host

  getFile: () ->

  randomId: () ->
    Math.random().toString(16).slice(2)

Server.run()

describe "Url Caching", ->
  describe "Downloading an image for the first time", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
        }]

      sinon.spy Server, "getFile"
      Client.compile @project_id, @request, (@error, @res, @body) => done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image", ->
      Server.getFile
        .calledWith("/" + @file)
        .should.equal true

  describe "When an image is in the cache and the last modified date is unchanged", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    after ->
      Server.getFile.restore()

    it "should not download the image again", ->
      Server.getFile.called.should.equal false

  describe "When an image is in the cache and the last modified date is advanced", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        @image_resource.modified = new Date(@last_modified + 3000)
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true

  describe "When an image is in the cache and the last modified date is further in the past", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        @image_resource.modified = new Date(@last_modified - 3000)
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should not download the image again", ->
      Server.getFile.called.should.equal false

  describe "When an image is in the cache and the last modified date is not specified", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (@error, @res, @body) =>
        sinon.spy Server, "getFile"
        delete @image_resource.modified
        Client.compile @project_id, @request, (@error, @res, @body) =>
          done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true

  describe "After clearing the cache", ->
    before (done) ->
      @project_id = Client.randomId()
      @file = "#{Server.randomId()}/lion.png"
      @request =
        resources: [{
          path: "main.tex"
          content: '''
            \\documentclass{article}
            \\usepackage{graphicx}
            \\begin{document}
            \\includegraphics{lion.png}
            \\end{document}
          '''
        }, @image_resource = {
          path: "lion.png"
          url: "http://#{host}:31415/#{@file}"
          modified: @last_modified = Date.now()
        }]

      Client.compile @project_id, @request, (error) =>
        throw error if error?
        Client.clearCache @project_id, (error, res, body) =>
          throw error if error?
          sinon.spy Server, "getFile"
          Client.compile @project_id, @request, (@error, @res, @body) =>
            done()

    afterEach ->
      Server.getFile.restore()

    it "should download the image again", ->
      Server.getFile.called.should.equal true
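The cases above pin down one rule: a URL resource already in the cache is fetched again only when it carries no modified date, or when its modified date is newer than the one the cache last saw; an unchanged or older date leaves the cache untouched, and clearing the cache forces a fresh fetch. A sketch of that decision, for illustration only (the server's real implementation is not part of this diff):

# Illustrative restatement of the re-download rule the suite above exercises.
shouldDownload = (cachedModified, resourceModified) ->
  return true if not resourceModified?  # no date supplied: always re-fetch
  return true if not cachedModified?    # nothing cached yet, or cache cleared: fetch
  resourceModified > cachedModified     # newer date: fetch; same or older: keep cache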
@@ -1,92 +0,0 @@
request = require "request"
fs = require "fs"
Settings = require "settings-sharelatex"

host = "localhost"

module.exports = Client =
  host: Settings.apis.clsi.url

  randomId: () ->
    Math.random().toString(16).slice(2)

  compile: (project_id, data, callback = (error, res, body) ->) ->
    request.post {
      url: "#{@host}/project/#{project_id}/compile"
      json:
        compile: data
    }, callback

  clearCache: (project_id, callback = (error, res, body) ->) ->
    request.del "#{@host}/project/#{project_id}", callback

  getOutputFile: (response, type) ->
    for file in response.compile.outputFiles
      if file.type == type and file.url.match("output.#{type}")
        return file
    return null

  runServer: (port, directory) ->
    express = require("express")
    app = express()
    app.use express.static(directory)
    app.listen(port, host)

  syncFromCode: (project_id, file, line, column, callback = (error, pdfPositions) ->) ->
    request.get {
      url: "#{@host}/project/#{project_id}/sync/code"
      qs: {
        file: file
        line: line
        column: column
      }
    }, (error, response, body) ->
      return callback(error) if error?
      callback null, JSON.parse(body)

  syncFromPdf: (project_id, page, h, v, callback = (error, pdfPositions) ->) ->
    request.get {
      url: "#{@host}/project/#{project_id}/sync/pdf"
      qs: {
        page: page,
        h: h, v: v
      }
    }, (error, response, body) ->
      return callback(error) if error?
      callback null, JSON.parse(body)

  compileDirectory: (project_id, baseDirectory, directory, serverPort, callback = (error, res, body) ->) ->
    resources = []
    entities = fs.readdirSync("#{baseDirectory}/#{directory}")
    rootResourcePath = "main.tex"
    while (entities.length > 0)
      entity = entities.pop()
      stat = fs.statSync("#{baseDirectory}/#{directory}/#{entity}")
      if stat.isDirectory()
        entities = entities.concat fs.readdirSync("#{baseDirectory}/#{directory}/#{entity}").map (subEntity) ->
          if subEntity == "main.tex"
            rootResourcePath = "#{entity}/#{subEntity}"
          return "#{entity}/#{subEntity}"
      else if stat.isFile() and entity != "output.pdf"
        extension = entity.split(".").pop()
        if ["tex", "bib", "cls", "sty", "pdf_tex", "Rtex", "ist", "md", "Rmd"].indexOf(extension) > -1
          resources.push
            path: entity
            content: fs.readFileSync("#{baseDirectory}/#{directory}/#{entity}").toString()
        else if ["eps", "ttf", "png", "jpg", "pdf", "jpeg"].indexOf(extension) > -1
          resources.push
            path: entity
            url: "http://#{host}:#{serverPort}/#{directory}/#{entity}"
            modified: stat.mtime

    fs.readFile "#{baseDirectory}/#{directory}/options.json", (error, body) =>
      req =
        resources: resources
        rootResourcePath: rootResourcePath

      if !error?
        body = JSON.parse body
        req.options = body

      @compile project_id, req, callback
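For reference, a minimal usage sketch of this helper, mirroring the acceptance tests above (the project id and document body are made up for illustration):

Client = require "./helpers/Client"

project_id = Client.randomId()
req =
  resources: [
    path: "main.tex"
    content: '''
      \\documentclass{article}
      \\begin{document}
      Hello world
      \\end{document}
    '''
  ]

Client.compile project_id, req, (error, res, body) ->
  throw error if error?
  # getOutputFile returns null when no file of the requested type was produced
  console.log Client.getOutputFile(body, "pdf")?.url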
Binary file not shown.
12  test/acceptance/fixtures/examples/fontawesome/main.tex  Normal file
@@ -0,0 +1,12 @@
\documentclass{article}
\usepackage{fontawesome}

\begin{document}
Cloud \faCloud

Cog \faCog

Database \faDatabase

Leaf \faLeaf
\end{document}
BIN  test/acceptance/fixtures/examples/fontawesome/output.pdf  Normal file
Binary file not shown.
@@ -0,0 +1,16 @@
\documentclass{article}
\usepackage{fontspec}
\defaultfontfeatures{Extension = .otf} % this is needed because
                                       % fontawesome package loads by
                                       % font name only
\usepackage{fontawesome}

\begin{document}
Cloud \faCloud

Cog \faCog

Database \faDatabase

Leaf \faLeaf
\end{document}
@@ -0,0 +1,3 @@
{
  "compiler": "xelatex"
}
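This options.json is what Client.compileDirectory in the test helper reads: when the file parses, its contents become the request's options, so this fixture is compiled with xelatex instead of the default engine. Illustratively, the request it produces has this shape (the resource content is elided here):

# Illustrative shape of the request compileDirectory builds for this fixture
req =
  resources: [
    { path: "main.tex", content: "..." }  # gathered from the fixture directory
  ]
  rootResourcePath: "main.tex"
  options:
    compiler: "xelatex"  # parsed from the options.json above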
BIN  test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdf  Normal file
Binary file not shown.
14  test/acceptance/fixtures/examples/hebrew/main.tex  Normal file
@@ -0,0 +1,14 @@
\documentclass{article}
\usepackage[utf8x]{inputenc}
\usepackage[hebrew,english]{babel}

\begin{document}
\selectlanguage{hebrew}

כדי לכתוב משהו באנגלית חייבים להשתמש במקרו הבא וכאן

ממשיכים לכתוב בעברית. טקסט נוסחאות תמיד יהיה בכיוון שמאל-לימין

\selectlanguage{english}
This is a test.
\end{document}
BIN  test/acceptance/fixtures/examples/hebrew/output.pdf  Normal file
Binary file not shown.
35  test/acceptance/fixtures/examples/knitr_utf8/main.Rtex  Normal file
@@ -0,0 +1,35 @@
\documentclass{article}
\usepackage[utf8]{inputenc}
\usepackage[spanish]{babel}

\begin{document}

\tableofcontents

\vspace{2cm} %Add a 2cm space

\begin{abstract}
Este es un breve resumen del contenido del
documento escrito en español.
\end{abstract}

\section{Sección Introductoria}
Esta es la primera sección, podemos agregar
algunos elementos adicionales y todo será
escrito correctamente. Más aún, si una palabra
es demasiado larga y tiene que ser truncada,
babel tratará de truncarla correctamente
dependiendo del idioma.

\section{Sección con teoremas}
Esta sección es para ver que pasa con los comandos
que definen texto

%% chunk options: cache this chunk
%% begin.rcode my-cache, cache=TRUE
% set.seed(123)
% x = runif(10)
% sd(x) # standard deviation
%% end.rcode

\end{document}
BIN  test/acceptance/fixtures/examples/knitr_utf8/output.pdf  Normal file
Binary file not shown.
@@ -1,4 +1,4 @@
-\documentclass{article}
+\documentclass[a4paper]{article}
 
 \usepackage{graphicx}
Binary file not shown.
@@ -1,17 +0,0 @@
Section Title
-------------

* List item one
* List item two

: Sample grid table.

+---------------+---------------+--------------------+
| Fruit         | Price         | Advantages         |
+===============+===============+====================+
| Bananas       | $1.34         | - built-in wrapper |
|               |               | - bright color     |
+---------------+---------------+--------------------+
| Oranges       | $2.10         | - cures scurvy     |
|               |               | - tasty            |
+---------------+---------------+--------------------+
@@ -1,9 +0,0 @@
\documentclass{article}
\usepackage{longtable}
\usepackage{booktabs, multicol, multirow}

\begin{document}

\input{chapters/chapter1}

\end{document}
Binary file not shown.
@@ -1,23 +0,0 @@
% Title
% Author
% Date

Chapter title
=============

Section Title
-------------

Hello world. Have a nice table:

: Sample grid table.

+---------------+---------------+--------------------+
| Fruit         | Price         | Advantages         |
+===============+===============+====================+
| Bananas       | $1.34         | - built-in wrapper |
|               |               | - bright color     |
+---------------+---------------+--------------------+
| Oranges       | $2.10         | - cures scurvy     |
|               |               | - tasty            |
+---------------+---------------+--------------------+
Some files were not shown because too many files have changed in this diff.