prettier: convert app/js decaffeinated files to Prettier format

mserranom
2020-02-19 12:14:37 +01:00
parent 4576ef54fb
commit cffbd4e9ef
26 changed files with 3881 additions and 2639 deletions
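The conversion applies Prettier's house style across the decaffeinated sources: semicolons dropped, double quotes switched to single quotes, two-space indentation, and long call expressions wrapped at the print width. As a rough sketch of the settings implied by the diff (the repository's actual Prettier config is not shown in this commit, so these values are an inference, not a quote):

// .prettierrc.js: hypothetical config inferred from the formatting below
module.exports = {
  semi: false, // "let commandRunnerPath;" becomes "let commandRunnerPath"
  singleQuote: true, // "./DockerRunner" becomes './DockerRunner'
  printWidth: 80 // long logger/callback calls wrap one argument per line
}
// A bulk conversion like this is typically run with something like:
//   npx prettier --write 'app/js/**/*.js'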

View File

@@ -5,16 +5,16 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
  commandRunnerPath = './DockerRunner'
} else {
  commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)
module.exports = CommandRunner
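The `x != null ? x.y : undefined` chains seen here and throughout the files below are decaffeinate's literal expansion of CoffeeScript's existential operator; the DS207 note in the header comments refers to shortening them. An illustrative comparison (not part of this commit):

// CoffeeScript source:       Settings.clsi?.dockerRunner
// decaffeinate expansion:    Settings.clsi != null ? Settings.clsi.dockerRunner : undefined
// shorter hand-written form: Settings.clsi && Settings.clsi.dockerRunner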

View File

@@ -12,159 +12,227 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')
module.exports = CompileController = {
  compile(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const timer = new Metrics.Timer('compile-request')
    return RequestParser.parse(req.body, function(error, request) {
      if (error != null) {
        return next(error)
      }
      request.project_id = req.params.project_id
      if (req.params.user_id != null) {
        request.user_id = req.params.user_id
      }
      return ProjectPersistenceManager.markProjectAsJustAccessed(
        request.project_id,
        function(error) {
          if (error != null) {
            return next(error)
          }
          return CompileManager.doCompileWithLock(request, function(
            error,
            outputFiles
          ) {
            let code, status
            if (outputFiles == null) {
              outputFiles = []
            }
            if (error instanceof Errors.AlreadyCompilingError) {
              code = 423 // Http 423 Locked
              status = 'compile-in-progress'
            } else if (error instanceof Errors.FilesOutOfSyncError) {
              code = 409 // Http 409 Conflict
              status = 'retry'
            } else if (error != null ? error.terminated : undefined) {
              status = 'terminated'
            } else if (error != null ? error.validate : undefined) {
              status = `validation-${error.validate}`
            } else if (error != null ? error.timedout : undefined) {
              status = 'timedout'
              logger.log(
                { err: error, project_id: request.project_id },
                'timeout running compile'
              )
            } else if (error != null) {
              status = 'error'
              code = 500
              logger.warn(
                { err: error, project_id: request.project_id },
                'error running compile'
              )
            } else {
              let file
              status = 'failure'
              for (file of Array.from(outputFiles)) {
                if (
                  file.path != null
                    ? file.path.match(/output\.pdf$/)
                    : undefined
                ) {
                  status = 'success'
                }
              }
              if (status === 'failure') {
                logger.warn(
                  { project_id: request.project_id, outputFiles },
                  'project failed to compile successfully, no output.pdf generated'
                )
              }
              // log an error if any core files are found
              for (file of Array.from(outputFiles)) {
                if (file.path === 'core') {
                  logger.error(
                    { project_id: request.project_id, req, outputFiles },
                    'core file found in output'
                  )
                }
              }
            }
            if (error != null) {
              outputFiles = error.outputFiles || []
            }
            timer.done()
            return res.status(code || 200).send({
              compile: {
                status,
                error: (error != null ? error.message : undefined) || error,
                outputFiles: outputFiles.map(file => ({
                  url:
                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
                    (request.user_id != null
                      ? `/user/${request.user_id}`
                      : '') +
                    (file.build != null ? `/build/${file.build}` : '') +
                    `/output/${file.path}`,
                  path: file.path,
                  type: file.type,
                  build: file.build
                }))
              }
            })
          })
        }
      )
    })
  },
  stopCompile(req, res, next) {
    const { project_id, user_id } = req.params
    return CompileManager.stopCompile(project_id, user_id, function(error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },
  clearCache(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return ProjectPersistenceManager.clearProject(
      req.params.project_id,
      req.params.user_id,
      function(error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  }, // No content
  syncFromCode(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const { file } = req.query
    const line = parseInt(req.query.line, 10)
    const column = parseInt(req.query.column, 10)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromCode(
      project_id,
      user_id,
      file,
      line,
      column,
      function(error, pdfPositions) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          pdf: pdfPositions
        })
      }
    )
  },
  syncFromPdf(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const page = parseInt(req.query.page, 10)
    const h = parseFloat(req.query.h)
    const v = parseFloat(req.query.v)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
      error,
      codePositions
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        code: codePositions
      })
    })
  },
  wordcount(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const file = req.query.file || 'main.tex'
    const { project_id } = req.params
    const { user_id } = req.params
    const { image } = req.query
    logger.log({ image, file, project_id }, 'word count request')
    return CompileManager.wordcount(project_id, user_id, file, image, function(
      error,
      result
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        texcount: result
      })
    })
  },
  status(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return res.send('OK')
  }
}

View File

@@ -15,109 +15,180 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileManager
const ResourceWriter = require('./ResourceWriter')
const LatexRunner = require('./LatexRunner')
const OutputFileFinder = require('./OutputFileFinder')
const OutputCacheManager = require('./OutputCacheManager')
const Settings = require('settings-sharelatex')
const Path = require('path')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const child_process = require('child_process')
const DraftModeManager = require('./DraftModeManager')
const TikzManager = require('./TikzManager')
const LockManager = require('./LockManager')
const fs = require('fs')
const fse = require('fs-extra')
const os = require('os')
const async = require('async')
const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')
const getCompileName = function(project_id, user_id) {
  if (user_id != null) {
    return `${project_id}-${user_id}`
  } else {
    return project_id
  }
}
const getCompileDir = (project_id, user_id) =>
  Path.join(Settings.path.compilesDir, getCompileName(project_id, user_id))
module.exports = CompileManager = {
  doCompileWithLock(request, callback) {
    if (callback == null) {
      callback = function(error, outputFiles) {}
    }
    const compileDir = getCompileDir(request.project_id, request.user_id)
    const lockFile = Path.join(compileDir, '.project-lock')
    // use a .project-lock file in the compile directory to prevent
    // simultaneous compiles
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        return callback(error)
      }
      return LockManager.runWithLock(
        lockFile,
        releaseLock => CompileManager.doCompile(request, releaseLock),
        callback
      )
    })
  },
  doCompile(request, callback) {
    if (callback == null) {
      callback = function(error, outputFiles) {}
    }
    const compileDir = getCompileDir(request.project_id, request.user_id)
    let timer = new Metrics.Timer('write-to-disk')
    logger.log(
      { project_id: request.project_id, user_id: request.user_id },
      'syncing resources to disk'
    )
    return ResourceWriter.syncResourcesToDisk(request, compileDir, function(
      error,
      resourceList
    ) {
      // NOTE: resourceList is insecure, it should only be used to exclude files from the output list
      if (error != null && error instanceof Errors.FilesOutOfSyncError) {
        logger.warn(
          { project_id: request.project_id, user_id: request.user_id },
          'files out of sync, please retry'
        )
        return callback(error)
      } else if (error != null) {
        logger.err(
          {
            err: error,
            project_id: request.project_id,
            user_id: request.user_id
          },
          'error writing resources to disk'
        )
        return callback(error)
      }
      logger.log(
        {
          project_id: request.project_id,
          user_id: request.user_id,
          time_taken: Date.now() - timer.start
        },
        'written files to disk'
      )
      timer.done()
      const injectDraftModeIfRequired = function(callback) {
        if (request.draft) {
          return DraftModeManager.injectDraftMode(
            Path.join(compileDir, request.rootResourcePath),
            callback
          )
        } else {
          return callback()
        }
      }
      const createTikzFileIfRequired = callback =>
        TikzManager.checkMainFile(
          compileDir,
          request.rootResourcePath,
          resourceList,
          function(error, needsMainFile) {
            if (error != null) {
              return callback(error)
            }
            if (needsMainFile) {
              return TikzManager.injectOutputFile(
                compileDir,
                request.rootResourcePath,
                callback
              )
            } else {
              return callback()
            }
          }
        )
      // set up environment variables for chktex
      const env = {}
      // only run chktex on LaTeX files (not knitr .Rtex files or any others)
      const isLaTeXFile =
        request.rootResourcePath != null
          ? request.rootResourcePath.match(/\.tex$/i)
          : undefined
      if (request.check != null && isLaTeXFile) {
        env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16'
        env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000'
        if (request.check === 'error') {
          env.CHKTEX_EXIT_ON_ERROR = 1
        }
        if (request.check === 'validate') {
          env.CHKTEX_VALIDATE = 1
        }
      }
      // apply a series of file modifications/creations for draft mode and tikz
      return async.series(
        [injectDraftModeIfRequired, createTikzFileIfRequired],
        function(error) {
          if (error != null) {
            return callback(error)
          }
          timer = new Metrics.Timer('run-compile')
          // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite)
          let tag =
            __guard__(
              __guard__(
                request.imageName != null
                  ? request.imageName.match(/:(.*)/)
                  : undefined,
                x1 => x1[1]
              ),
              x => x.replace(/\./g, '-')
            ) || 'default'
          if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
            tag = 'other'
          } // exclude smoke test
          Metrics.inc('compiles')
          Metrics.inc(`compiles-with-image.${tag}`)
          const compileName = getCompileName(
            request.project_id,
            request.user_id
          )
          return LatexRunner.runLatex(
            compileName,
            {
              directory: compileDir,
              mainFile: request.rootResourcePath,
              compiler: request.compiler,
@@ -125,292 +196,459 @@ module.exports = CompileManager = {
              image: request.imageName,
              flags: request.flags,
              environment: env
            },
            function(error, output, stats, timings) {
              // request was for validation only
              let metric_key, metric_value
              if (request.check === 'validate') {
                const result = (error != null
                ? error.code
                : undefined)
                  ? 'fail'
                  : 'pass'
                error = new Error('validation')
                error.validate = result
              }
              // request was for compile, and failed on validation
              if (
                request.check === 'error' &&
                (error != null ? error.message : undefined) === 'exited'
              ) {
                error = new Error('compilation')
                error.validate = 'fail'
              }
              // compile was killed by user, was a validation, or a compile which failed validation
              if (
                (error != null ? error.terminated : undefined) ||
                (error != null ? error.validate : undefined) ||
                (error != null ? error.timedout : undefined)
              ) {
                OutputFileFinder.findOutputFiles(
                  resourceList,
                  compileDir,
                  function(err, outputFiles) {
                    if (err != null) {
                      return callback(err)
                    }
                    error.outputFiles = outputFiles // return output files so user can check logs
                    return callback(error)
                  }
                )
                return
              }
              // compile completed normally
              if (error != null) {
                return callback(error)
              }
              Metrics.inc('compiles-succeeded')
              const object = stats || {}
              for (metric_key in object) {
                metric_value = object[metric_key]
                Metrics.count(metric_key, metric_value)
              }
              const object1 = timings || {}
              for (metric_key in object1) {
                metric_value = object1[metric_key]
                Metrics.timing(metric_key, metric_value)
              }
              const loadavg =
                typeof os.loadavg === 'function' ? os.loadavg() : undefined
              if (loadavg != null) {
                Metrics.gauge('load-avg', loadavg[0])
              }
              const ts = timer.done()
              logger.log(
                {
                  project_id: request.project_id,
                  user_id: request.user_id,
                  time_taken: ts,
                  stats,
                  timings,
                  loadavg
                },
                'done compile'
              )
              if ((stats != null ? stats['latex-runs'] : undefined) > 0) {
                Metrics.timing('run-compile-per-pass', ts / stats['latex-runs'])
              }
              if (
                (stats != null ? stats['latex-runs'] : undefined) > 0 &&
                (timings != null ? timings['cpu-time'] : undefined) > 0
              ) {
                Metrics.timing(
                  'run-compile-cpu-time-per-pass',
                  timings['cpu-time'] / stats['latex-runs']
                )
              }
              return OutputFileFinder.findOutputFiles(
                resourceList,
                compileDir,
                function(error, outputFiles) {
                  if (error != null) {
                    return callback(error)
                  }
                  return OutputCacheManager.saveOutputFiles(
                    outputFiles,
                    compileDir,
                    (error, newOutputFiles) => callback(null, newOutputFiles)
                  )
                }
              )
            }
          )
        }
      )
    })
  },
  stopCompile(project_id, user_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const compileName = getCompileName(project_id, user_id)
    return LatexRunner.killLatex(compileName, callback)
  },
  clearProject(project_id, user_id, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callback = function(error) {
      _callback(error)
      return (_callback = function() {})
    }
    const compileDir = getCompileDir(project_id, user_id)
    return CompileManager._checkDirectory(compileDir, function(err, exists) {
      if (err != null) {
        return callback(err)
      }
      if (!exists) {
        return callback()
      } // skip removal if no directory present
      const proc = child_process.spawn('rm', ['-r', compileDir])
      proc.on('error', callback)
      let stderr = ''
      proc.stderr.on('data', chunk => (stderr += chunk.toString()))
      return proc.on('close', function(code) {
        if (code === 0) {
          return callback(null)
        } else {
          return callback(new Error(`rm -r ${compileDir} failed: ${stderr}`))
        }
      })
    })
  },
  _findAllDirs(callback) {
    if (callback == null) {
      callback = function(error, allDirs) {}
    }
    const root = Settings.path.compilesDir
    return fs.readdir(root, function(err, files) {
      if (err != null) {
        return callback(err)
      }
      const allDirs = Array.from(files).map(file => Path.join(root, file))
      return callback(null, allDirs)
    })
  },
  clearExpiredProjects(max_cache_age_ms, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const now = Date.now()
    // action for each directory
    const expireIfNeeded = (checkDir, cb) =>
      fs.stat(checkDir, function(err, stats) {
        if (err != null) {
          return cb()
        } // ignore errors checking directory
        const age = now - stats.mtime
        const hasExpired = age > max_cache_age_ms
        if (hasExpired) {
          return fse.remove(checkDir, cb)
        } else {
          return cb()
        }
      })
    // iterate over all project directories
    return CompileManager._findAllDirs(function(error, allDirs) {
      if (error != null) {
        return callback()
      }
      return async.eachSeries(allDirs, expireIfNeeded, callback)
    })
  },
  _checkDirectory(compileDir, callback) {
    if (callback == null) {
      callback = function(error, exists) {}
    }
    return fs.lstat(compileDir, function(err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        return callback(null, false) // directory does not exist
      } else if (err != null) {
        logger.err(
          { dir: compileDir, err },
          'error on stat of project directory for removal'
        )
        return callback(err)
      } else if (!(stats != null ? stats.isDirectory() : undefined)) {
        logger.err(
          { dir: compileDir, stats },
          'bad project directory for removal'
        )
        return callback(new Error('project directory is not directory'))
      } else {
        return callback(null, true)
      }
    })
  }, // directory exists
  syncFromCode(project_id, user_id, file_name, line, column, callback) {
    // If LaTeX was run in a virtual environment, the file path that synctex expects
    // might not match the file path on the host. The .synctex.gz file however, will be accessed
    // wherever it is on the host.
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    const compileName = getCompileName(project_id, user_id)
    const base_dir = Settings.path.synctexBaseDir(compileName)
    const file_path = base_dir + '/' + file_name
    const compileDir = getCompileDir(project_id, user_id)
    const synctex_path = `${base_dir}/output.pdf`
    const command = ['code', synctex_path, file_path, line, column]
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync from code'
        )
        return callback(error)
      }
      return CompileManager._runSynctex(project_id, user_id, command, function(
        error,
        stdout
      ) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { project_id, user_id, file_name, line, column, command, stdout },
          'synctex code output'
        )
        return callback(
          null,
          CompileManager._parseSynctexFromCodeOutput(stdout)
        )
      })
    })
  },
  syncFromPdf(project_id, user_id, page, h, v, callback) {
    if (callback == null) {
      callback = function(error, filePositions) {}
    }
    const compileName = getCompileName(project_id, user_id)
    const compileDir = getCompileDir(project_id, user_id)
    const base_dir = Settings.path.synctexBaseDir(compileName)
    const synctex_path = `${base_dir}/output.pdf`
    const command = ['pdf', synctex_path, page, h, v]
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync to code'
        )
        return callback(error)
      }
      return CompileManager._runSynctex(project_id, user_id, command, function(
        error,
        stdout
      ) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { project_id, user_id, page, h, v, stdout },
          'synctex pdf output'
        )
        return callback(
          null,
          CompileManager._parseSynctexFromPdfOutput(stdout, base_dir)
        )
      })
    })
  },
  _checkFileExists(path, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const synctexDir = Path.dirname(path)
    const synctexFile = Path.join(synctexDir, 'output.synctex.gz')
    return fs.stat(synctexDir, function(error, stats) {
      if ((error != null ? error.code : undefined) === 'ENOENT') {
        return callback(
          new Errors.NotFoundError('called synctex with no output directory')
        )
      }
      if (error != null) {
        return callback(error)
      }
      return fs.stat(synctexFile, function(error, stats) {
        if ((error != null ? error.code : undefined) === 'ENOENT') {
          return callback(
            new Errors.NotFoundError('called synctex with no output file')
          )
        }
        if (error != null) {
          return callback(error)
        }
        if (!(stats != null ? stats.isFile() : undefined)) {
          return callback(new Error('not a file'))
        }
        return callback()
      })
    })
  },
  _runSynctex(project_id, user_id, command, callback) {
    if (callback == null) {
      callback = function(error, stdout) {}
    }
    const seconds = 1000
    command.unshift('/opt/synctex')
    const directory = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000 // increased to allow for large projects
    const compileName = getCompileName(project_id, user_id)
    return CommandRunner.run(
      compileName,
      command,
      directory,
      Settings.clsi != null ? Settings.clsi.docker.image : undefined,
      timeout,
      {},
      function(error, output) {
        if (error != null) {
          logger.err(
            { err: error, command, project_id, user_id },
            'error running synctex'
          )
          return callback(error)
        }
        return callback(null, output.stdout)
      }
    )
  },
  _parseSynctexFromCodeOutput(output) {
    const results = []
    for (const line of Array.from(output.split('\n'))) {
      const [node, page, h, v, width, height] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        results.push({
          page: parseInt(page, 10),
          h: parseFloat(h),
          v: parseFloat(v),
          height: parseFloat(height),
          width: parseFloat(width)
        })
      }
    }
    return results
  },
  _parseSynctexFromPdfOutput(output, base_dir) {
    const results = []
    for (let line of Array.from(output.split('\n'))) {
      let column, file_path, node
      ;[node, file_path, line, column] = Array.from(line.split('\t'))
      if (node === 'NODE') {
        const file = file_path.slice(base_dir.length + 1)
        results.push({
          file,
          line: parseInt(line, 10),
          column: parseInt(column, 10)
        })
      }
    }
    return results
  },
  wordcount(project_id, user_id, file_name, image, callback) {
    if (callback == null) {
      callback = function(error, pdfPositions) {}
    }
    logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
    const file_path = `$COMPILE_DIR/${file_name}`
    const command = [
      'texcount',
      '-nocol',
      '-inc',
      file_path,
      `-out=${file_path}.wc`
    ]
    const compileDir = getCompileDir(project_id, user_id)
    const timeout = 60 * 1000
    const compileName = getCompileName(project_id, user_id)
    return fse.ensureDir(compileDir, function(error) {
      if (error != null) {
        logger.err(
          { error, project_id, user_id, file_name },
          'error ensuring dir for sync from code'
        )
        return callback(error)
      }
      return CommandRunner.run(
        compileName,
        command,
        compileDir,
        image,
        timeout,
        {},
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return fs.readFile(
            compileDir + '/' + file_name + '.wc',
            'utf-8',
            function(err, stdout) {
              if (err != null) {
                // call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
                logger.err(
                  { node_err: err, command, compileDir, project_id, user_id },
                  'error reading word count output'
                )
                return callback(err)
              }
              const results = CompileManager._parseWordcountFromOutput(stdout)
              logger.log(
                { project_id, user_id, wordcount: results },
                'word count results'
              )
              return callback(null, results)
            }
          )
        }
      )
    })
  },
  _parseWordcountFromOutput(output) {
    const results = {
      encode: '',
      textWords: 0,
      headWords: 0,
      outside: 0,
@@ -419,45 +657,49 @@ module.exports = CompileManager = {
      mathInline: 0,
      mathDisplay: 0,
      errors: 0,
      messages: ''
    }
    for (const line of Array.from(output.split('\n'))) {
      const [data, info] = Array.from(line.split(':'))
      if (data.indexOf('Encoding') > -1) {
        results.encode = info.trim()
      }
      if (data.indexOf('in text') > -1) {
        results.textWords = parseInt(info, 10)
      }
      if (data.indexOf('in head') > -1) {
        results.headWords = parseInt(info, 10)
      }
      if (data.indexOf('outside') > -1) {
        results.outside = parseInt(info, 10)
      }
      if (data.indexOf('of head') > -1) {
        results.headers = parseInt(info, 10)
      }
      if (data.indexOf('Number of floats/tables/figures') > -1) {
        results.elements = parseInt(info, 10)
      }
      if (data.indexOf('Number of math inlines') > -1) {
        results.mathInline = parseInt(info, 10)
      }
      if (data.indexOf('Number of math displayed') > -1) {
        results.mathDisplay = parseInt(info, 10)
      }
      if (data === '(errors') {
        // errors reported as (errors:123)
        results.errors = parseInt(info, 10)
      }
      if (line.indexOf('!!! ') > -1) {
        // errors logged as !!! message !!!
        results.messages += line + '\n'
      }
    }
    return results
  }
}
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

View File

@@ -3,31 +3,36 @@
 */
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')
// here we coerce html, css and js to text/plain,
// otherwise choose correct mime type based on file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
  map(path) {
    switch (Path.extname(path)) {
      case '.txt':
      case '.html':
      case '.js':
      case '.css':
      case '.svg':
        return 'text/plain'
      case '.csv':
        return 'text/csv'
      case '.pdf':
        return 'application/pdf'
      case '.png':
        return 'image/png'
      case '.jpg':
      case '.jpeg':
        return 'image/jpeg'
      case '.tiff':
        return 'image/tiff'
      case '.gif':
        return 'image/gif'
      default:
        return 'application/octet-stream'
    }
  }
}
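The mapper is a single pure function, so usage (illustrative, not part of this commit) is just:

// assuming the file's conventional location next to its callers
const ContentTypeMapper = require('./ContentTypeMapper')
ContentTypeMapper.map('figure.png') // => 'image/png'
ContentTypeMapper.map('script.js') // => 'text/plain' (html/css/js are coerced, per the comment above)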

View File

@@ -5,14 +5,14 @@
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const queue = async.queue(
  (task, cb) => task(cb),
  Settings.parallelSqlQueryLimit
)
queue.drain = () => logger.debug('all items have been processed')
module.exports = { queue }
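Because the worker simply invokes `task(cb)`, every item pushed onto this queue must itself be a function taking a completion callback; a hypothetical producer:

// Hypothetical: run a unit of SQL work through the bounded queue
// (runSomeQuery is a stand-in for a real query helper)
queue.push(function(cb) {
  runSomeQuery(err => cb(err)) // cb signals the queue that this task is finished
})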

View File

@@ -10,80 +10,104 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const logger = require('logger-sharelatex')
const LockState = {} // locks for docker container operations, by container name
module.exports = LockManager = {
  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000, // retry time
  tryLock(key, callback) {
    let lockValue
    if (callback == null) {
      callback = function(err, gotLock) {}
    }
    const existingLock = LockState[key]
    if (existingLock != null) {
      // the lock is already taken, check how old it is
      const lockAge = Date.now() - existingLock.created
      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
        return callback(null, false) // we didn't get the lock, bail out
      } else {
        logger.error(
          { key, lock: existingLock, age: lockAge },
          'taking old lock by force'
        )
      }
    }
    // take the lock
    LockState[key] = lockValue = { created: Date.now() }
    return callback(null, true, lockValue)
  },
  getLock(key, callback) {
    let attempt
    if (callback == null) {
      callback = function(error, lockValue) {}
    }
    const startTime = Date.now()
    return (attempt = () =>
      LockManager.tryLock(key, function(error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        if (gotLock) {
          return callback(null, lockValue)
        } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
          const e = new Error('Lock timeout')
          e.key = key
          return callback(e)
        } else {
          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
        }
      }))()
  },
  releaseLock(key, lockValue, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const existingLock = LockState[key]
    if (existingLock === lockValue) {
      // lockValue is an object, so we can test by reference
      delete LockState[key] // our lock, so we can free it
      return callback()
    } else if (existingLock != null) {
      // lock exists but doesn't match ours
      logger.error(
        { key, lock: existingLock },
        'tried to release lock taken by force'
      )
      return callback()
    } else {
      logger.error(
        { key, lock: existingLock },
        'tried to release lock that has gone'
      )
      return callback()
    }
  },
  runWithLock(key, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return LockManager.getLock(key, function(error, lockValue) {
      if (error != null) {
        return callback(error)
      }
      return runner((error1, ...args) =>
        LockManager.releaseLock(key, lockValue, function(error2) {
          error = error1 || error2
          if (error != null) {
            return callback(error)
          }
          return callback(null, ...Array.from(args))
        })
      )
    })
  }
}
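`runWithLock` hands the release function to the runner as its completion callback, so the runner must call it exactly once with its results; a hypothetical call site:

// Hypothetical usage of the LockManager above
// (doContainerWork is a stand-in; it must call releaseLock(err, ...results) when done)
LockManager.runWithLock(
  'container-name', // key: one lock per docker container name
  releaseLock => doContainerWork(releaseLock),
  function(error, ...results) {
    // invoked after the lock has been released
  }
)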

View File

@@ -15,380 +15,525 @@
* DS207: Consider shorter variations of null checks * DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/ */
let DockerRunner, oneHour; let DockerRunner, oneHour
const Settings = require("settings-sharelatex"); const Settings = require('settings-sharelatex')
const logger = require("logger-sharelatex"); const logger = require('logger-sharelatex')
const Docker = require("dockerode"); const Docker = require('dockerode')
const dockerode = new Docker(); const dockerode = new Docker()
const crypto = require("crypto"); const crypto = require('crypto')
const async = require("async"); const async = require('async')
const LockManager = require("./DockerLockManager"); const LockManager = require('./DockerLockManager')
const fs = require("fs"); const fs = require('fs')
const Path = require('path'); const Path = require('path')
const _ = require("underscore"); const _ = require('underscore')
logger.info("using docker runner"); logger.info('using docker runner')
const usingSiblingContainers = () => __guard__(Settings != null ? Settings.path : undefined, x => x.sandboxedCompilesHostDir) != null; const usingSiblingContainers = () =>
__guard__(
Settings != null ? Settings.path : undefined,
x => x.sandboxedCompilesHostDir
) != null
module.exports = (DockerRunner = { module.exports = DockerRunner = {
ERR_NOT_DIRECTORY: new Error("not a directory"), ERR_NOT_DIRECTORY: new Error('not a directory'),
ERR_TERMINATED: new Error("terminated"), ERR_TERMINATED: new Error('terminated'),
ERR_EXITED: new Error("exited"), ERR_EXITED: new Error('exited'),
ERR_TIMED_OUT: new Error("container timed out"), ERR_TIMED_OUT: new Error('container timed out'),
run(project_id, command, directory, image, timeout, environment, callback) { run(project_id, command, directory, image, timeout, environment, callback) {
let name
let name; if (callback == null) {
if (callback == null) { callback = function(error, output) {}; } callback = function(error, output) {}
}
if (usingSiblingContainers()) { if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir; const _newPath = Settings.path.sandboxedCompilesHostDir
logger.log({path: _newPath}, "altering bind path for sibling containers"); logger.log(
{ path: _newPath },
'altering bind path for sibling containers'
)
// Server Pro, example: // Server Pro, example:
// '/var/lib/sharelatex/data/compiles/<project-id>' // '/var/lib/sharelatex/data/compiles/<project-id>'
// ... becomes ... // ... becomes ...
// '/opt/sharelatex_data/data/compiles/<project-id>' // '/opt/sharelatex_data/data/compiles/<project-id>'
directory = Path.join(Settings.path.sandboxedCompilesHostDir, Path.basename(directory)); directory = Path.join(
Settings.path.sandboxedCompilesHostDir,
Path.basename(directory)
)
} }
const volumes = {}; const volumes = {}
volumes[directory] = "/compile"; volumes[directory] = '/compile'
command = (Array.from(command).map((arg) => __guardMethod__(arg.toString(), 'replace', o => o.replace('$COMPILE_DIR', "/compile")))); command = Array.from(command).map(arg =>
if ((image == null)) { __guardMethod__(arg.toString(), 'replace', o =>
({ image } = Settings.clsi.docker); o.replace('$COMPILE_DIR', '/compile')
)
)
if (image == null) {
;({ image } = Settings.clsi.docker)
} }
if (Settings.texliveImageNameOveride != null) { if (Settings.texliveImageNameOveride != null) {
const img = image.split("/"); const img = image.split('/')
image = `${Settings.texliveImageNameOveride}/${img[2]}`; image = `${Settings.texliveImageNameOveride}/${img[2]}`
} }
const options = DockerRunner._getContainerOptions(command, image, volumes, timeout, environment); const options = DockerRunner._getContainerOptions(
const fingerprint = DockerRunner._fingerprintContainer(options); command,
options.name = (name = `project-${project_id}-${fingerprint}`); image,
volumes,
timeout,
environment
)
const fingerprint = DockerRunner._fingerprintContainer(options)
options.name = name = `project-${project_id}-${fingerprint}`
// logOptions = _.clone(options) // logOptions = _.clone(options)
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
    logger.log({ project_id }, 'running docker container')
    DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
      error,
      output
    ) {
      if (
        __guard__(error != null ? error.message : undefined, x =>
          x.match('HTTP code is 500')
        )
      ) {
        logger.log(
          { err: error, project_id },
          'error running container so destroying and retrying'
        )
        return DockerRunner.destroyContainer(name, null, true, function(error) {
          if (error != null) {
            return callback(error)
          }
          return DockerRunner._runAndWaitForContainer(
            options,
            volumes,
            timeout,
            callback
          )
        })
      } else {
        return callback(error, output)
      }
    })
    return name
  }, // pass back the container name to allow it to be killed
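  // Usage sketch (hypothetical values; only the call shape is taken from
  // `run` above — the project id, image tag and paths are illustrative):
  //   const name = DockerRunner.run(
  //     '5e4ca1bbf0f0b1a2c3d4e5f6',
  //     ['latexmk', '-pdf', '$COMPILE_DIR/main.tex'],
  //     '/var/lib/sharelatex/data/compiles/5e4ca1bbf0f0b1a2c3d4e5f6',
  //     'texlive-full:2017.1',
  //     60 * 1000,
  //     {},
  //     (error, output) => {}
  //   )
  //   // `name` can later be passed to DockerRunner.kill(name, callback)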
  kill(container_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ container_id }, 'sending kill signal to container')
    const container = dockerode.getContainer(container_id)
    return container.kill(function(error) {
      if (
        error != null &&
        __guardMethod__(error != null ? error.message : undefined, 'match', o =>
          o.match(/Cannot kill container .* is not running/)
        )
      ) {
        logger.warn(
          { err: error, container_id },
          'container not running, continuing'
        )
        error = null
      }
      if (error != null) {
        logger.error({ err: error, container_id }, 'error killing container')
        return callback(error)
      } else {
        return callback()
      }
    })
  },
  _runAndWaitForContainer(options, volumes, timeout, _callback) {
    if (_callback == null) {
      _callback = function(error, output) {}
    }
    const callback = function(...args) {
      _callback(...Array.from(args || []))
      // Only call the callback once
      return (_callback = function() {})
    }
    const { name } = options
    let streamEnded = false
    let containerReturned = false
    let output = {}
    const callbackIfFinished = function() {
      if (streamEnded && containerReturned) {
        return callback(null, output)
      }
    }
    const attachStreamHandler = function(error, _output) {
      if (error != null) {
        return callback(error)
      }
      output = _output
      streamEnded = true
      return callbackIfFinished()
    }
    return DockerRunner.startContainer(
      options,
      volumes,
      attachStreamHandler,
      function(error, containerId) {
        if (error != null) {
          return callback(error)
        }
        return DockerRunner.waitForContainer(name, timeout, function(
          error,
          exitCode
        ) {
          let err
          if (error != null) {
            return callback(error)
          }
          if (exitCode === 137) {
            // exit status from kill -9
            err = DockerRunner.ERR_TERMINATED
            err.terminated = true
            return callback(err)
          }
          if (exitCode === 1) {
            // exit status from chktex
            err = DockerRunner.ERR_EXITED
            err.code = exitCode
            return callback(err)
          }
          containerReturned = true
          __guard__(
            options != null ? options.HostConfig : undefined,
            x => (x.SecurityOpt = null)
          ) // keep the log line small
          logger.log({ err, exitCode, options }, 'docker container has exited')
          return callbackIfFinished()
        })
      }
    )
  },
  _getContainerOptions(command, image, volumes, timeout, environment) {
    let m, year
    let key, value, hostVol, dockerVol
    const timeoutInSeconds = timeout / 1000
    const dockerVolumes = {}
    for (hostVol in volumes) {
      dockerVol = volumes[hostVol]
      dockerVolumes[dockerVol] = {}
      if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
        volumes[hostVol] = `${dockerVol}:rw`
      }
    }
    // merge settings and environment parameter
    const env = {}
    for (const src of [Settings.clsi.docker.env, environment || {}]) {
      for (key in src) {
        value = src[key]
        env[key] = value
      }
    }
    // set the path based on the image year
    if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
      year = m[1]
    } else {
      year = '2014'
    }
    env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
    const options = {
      Cmd: command,
      Image: image,
      Volumes: dockerVolumes,
      WorkingDir: '/compile',
      NetworkDisabled: true,
      Memory: 1024 * 1024 * 1024 * 1024, // 1024^4 bytes (1 TB)
      User: Settings.clsi.docker.user,
      Env: (() => {
        const result = []
        for (key in env) {
          value = env[key]
          result.push(`${key}=${value}`)
        }
        return result
      })(), // convert the environment hash to an array
      HostConfig: {
        Binds: (() => {
          const result1 = []
          for (hostVol in volumes) {
            dockerVol = volumes[hostVol]
            result1.push(`${hostVol}:${dockerVol}`)
          }
          return result1
        })(),
        LogConfig: { Type: 'none', Config: {} },
        Ulimits: [
          {
            Name: 'cpu',
            Soft: timeoutInSeconds + 5,
            Hard: timeoutInSeconds + 10
          }
        ],
        CapDrop: 'ALL',
        SecurityOpt: ['no-new-privileges']
      }
    }
    if (
      (Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
      null
    ) {
      options.HostConfig.Binds.push(
        `${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
      )
    }
    if (Settings.clsi.docker.seccomp_profile != null) {
      options.HostConfig.SecurityOpt.push(
        `seccomp=${Settings.clsi.docker.seccomp_profile}`
      )
    }
    return options
  },
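  // Sketch of the object _getContainerOptions builds, with hypothetical
  // values (a 60s timeout, one /compile bind, image year 2017 setting PATH):
  //   {
  //     Cmd: ['latexmk', '-pdf', '/compile/main.tex'],
  //     Image: 'texlive-full:2017.1',
  //     Volumes: { '/compile': {} },
  //     WorkingDir: '/compile',
  //     NetworkDisabled: true,
  //     Memory: 1099511627776,
  //     User: 'tex',
  //     Env: ['PATH=/usr/local/sbin:...:/usr/local/texlive/2017/bin/x86_64-linux/'],
  //     HostConfig: {
  //       Binds: ['/data/compiles/<project-id>:/compile:rw'],
  //       LogConfig: { Type: 'none', Config: {} },
  //       Ulimits: [{ Name: 'cpu', Soft: 65, Hard: 70 }],
  //       CapDrop: 'ALL',
  //       SecurityOpt: ['no-new-privileges']
  //     }
  //   }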
  _fingerprintContainer(containerOptions) {
    // Yay, Hashing!
    const json = JSON.stringify(containerOptions)
    return crypto
      .createHash('md5')
      .update(json)
      .digest('hex')
  },
  startContainer(options, volumes, attachStreamHandler, callback) {
    return LockManager.runWithLock(
      options.name,
      releaseLock =>
        // Check that volumes exist before starting the container.
        // When a container is started with volume pointing to a
        // non-existent directory then docker creates the directory but
        // with root ownership.
        DockerRunner._checkVolumes(options, volumes, function(err) {
          if (err != null) {
            return releaseLock(err)
          }
          return DockerRunner._startContainer(
            options,
            volumes,
            attachStreamHandler,
            releaseLock
          )
        }),
      callback
    )
  },
  // Check that volumes exist and are directories
  _checkVolumes(options, volumes, callback) {
    if (callback == null) {
      callback = function(error, containerName) {}
    }
    if (usingSiblingContainers()) {
      // Server Pro, with sibling-containers active, skip checks
      return callback(null)
    }
    const checkVolume = (path, cb) =>
      fs.stat(path, function(err, stats) {
        if (err != null) {
          return cb(err)
        }
        if (!(stats != null ? stats.isDirectory() : undefined)) {
          return cb(DockerRunner.ERR_NOT_DIRECTORY)
        }
        return cb()
      })
    const jobs = []
    for (const vol in volumes) {
      ;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
    }
    return async.series(jobs, callback)
  },
  _startContainer(options, volumes, attachStreamHandler, callback) {
    if (callback == null) {
      callback = function(error, output) {}
    }
    callback = _.once(callback)
    const { name } = options
    logger.log({ container_name: name }, 'starting container')
    const container = dockerode.getContainer(name)
    const createAndStartContainer = () =>
      dockerode.createContainer(options, function(error, container) {
        if (error != null) {
          return callback(error)
        }
        return startExistingContainer()
      })
    var startExistingContainer = () =>
      DockerRunner.attachToContainer(
        options.name,
        attachStreamHandler,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return container.start(function(error) {
            if (
              error != null &&
              (error != null ? error.statusCode : undefined) !== 304
            ) {
              // already running
              return callback(error)
            } else {
              return callback()
            }
          })
        }
      )
    return container.inspect(function(error, stats) {
      if ((error != null ? error.statusCode : undefined) === 404) {
        return createAndStartContainer()
      } else if (error != null) {
        logger.err(
          { container_name: name, error },
          'unable to inspect container to start'
        )
        return callback(error)
      } else {
        return startExistingContainer()
      }
    })
  },
  attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
    const container = dockerode.getContainer(containerId)
    return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
      error,
      stream
    ) {
      if (error != null) {
        logger.error(
          { err: error, container_id: containerId },
          'error attaching to container'
        )
        return attachStartCallback(error)
      } else {
        attachStartCallback()
      }
      logger.log({ container_id: containerId }, 'attached to container')
      const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
      const createStringOutputStream = function(name) {
        return {
          data: '',
          overflowed: false,
          write(data) {
            if (this.overflowed) {
              return
            }
            if (this.data.length < MAX_OUTPUT) {
              return (this.data += data)
            } else {
              logger.error(
                {
                  container_id: containerId,
                  length: this.data.length,
                  maxLen: MAX_OUTPUT
                },
                `${name} exceeds max size`
              )
              this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
              return (this.overflowed = true)
            }
          }
          // kill container if too much output
          // docker.containers.kill(containerId, () ->)
        }
      }
      const stdout = createStringOutputStream('stdout')
      const stderr = createStringOutputStream('stderr')
      container.modem.demuxStream(stream, stdout, stderr)
      stream.on('error', err =>
        logger.error(
          { err, container_id: containerId },
          'error reading from container stream'
        )
      )
      return stream.on('end', () =>
        attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
      )
    })
  },
  waitForContainer(containerId, timeout, _callback) {
    if (_callback == null) {
      _callback = function(error, exitCode) {}
    }
    const callback = function(...args) {
      _callback(...Array.from(args || []))
      // Only call the callback once
      return (_callback = function() {})
    }
    const container = dockerode.getContainer(containerId)
    let timedOut = false
    const timeoutId = setTimeout(function() {
      timedOut = true
      logger.log(
        { container_id: containerId },
        'timeout reached, killing container'
      )
      return container.kill(function() {})
    }, timeout)
    logger.log({ container_id: containerId }, 'waiting for docker container')
    return container.wait(function(error, res) {
      if (error != null) {
        clearTimeout(timeoutId)
        logger.error(
          { err: error, container_id: containerId },
          'error waiting for container'
        )
        return callback(error)
      }
      if (timedOut) {
        logger.log({ containerId }, 'docker container timed out')
        error = DockerRunner.ERR_TIMED_OUT
        error.timedout = true
        return callback(error)
      } else {
        clearTimeout(timeoutId)
        logger.log(
          { container_id: containerId, exitCode: res.StatusCode },
          'docker container returned'
        )
        return callback(null, res.StatusCode)
      }
    })
  },
  destroyContainer(containerName, containerId, shouldForce, callback) {
@@ -398,86 +543,138 @@ module.exports = (DockerRunner = {
    // async exception, but if you delete by id it just does a normal
    // error callback. We fall back to deleting by name if no id is
    // supplied.
    if (callback == null) {
      callback = function(error) {}
    }
    return LockManager.runWithLock(
      containerName,
      releaseLock =>
        DockerRunner._destroyContainer(
          containerId || containerName,
          shouldForce,
          releaseLock
        ),
      callback
    )
  },
  _destroyContainer(containerId, shouldForce, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ container_id: containerId }, 'destroying docker container')
    const container = dockerode.getContainer(containerId)
    return container.remove({ force: shouldForce === true }, function(error) {
      if (
        error != null &&
        (error != null ? error.statusCode : undefined) === 404
      ) {
        logger.warn(
          { err: error, container_id: containerId },
          'container not found, continuing'
        )
        error = null
      }
      if (error != null) {
        logger.error(
          { err: error, container_id: containerId },
          'error destroying container'
        )
      } else {
        logger.log({ container_id: containerId }, 'destroyed container')
      }
      return callback(error)
    })
  },
  // handle expiry of docker containers
  MAX_CONTAINER_AGE:
    Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
  examineOldContainer(container, callback) {
    if (callback == null) {
      callback = function(error, name, id, ttl) {}
    }
    const name =
      container.Name ||
      (container.Names != null ? container.Names[0] : undefined)
    const created = container.Created * 1000 // creation time is returned in seconds
    const now = Date.now()
    const age = now - created
    const maxAge = DockerRunner.MAX_CONTAINER_AGE
    const ttl = maxAge - age
    logger.log(
      { containerName: name, created, now, age, maxAge, ttl },
      'checking whether to destroy container'
    )
    return callback(null, name, container.Id, ttl)
  },
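  // Worked example for examineOldContainer (hypothetical times): a container
  // created 90 minutes ago, with the default one-hour MAX_CONTAINER_AGE,
  // gets ttl = 3600000 - 5400000 = -1800000; ttl <= 0 marks it for removal
  // in destroyOldContainers below.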
  destroyOldContainers(callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return dockerode.listContainers({ all: true }, function(error, containers) {
      if (error != null) {
        return callback(error)
      }
      const jobs = []
      for (const container of Array.from(containers || [])) {
        ;(container =>
          DockerRunner.examineOldContainer(container, function(
            err,
            name,
            id,
            ttl
          ) {
            if (name.slice(0, 9) === '/project-' && ttl <= 0) {
              return jobs.push(cb =>
                DockerRunner.destroyContainer(name, id, false, () => cb())
              )
            }
          }))(container)
      }
      // Ignore errors because some containers get stuck but
      // will be destroyed next time
      return async.series(jobs, callback)
    })
  },
  startContainerMonitor() {
    logger.log(
      { maxAge: DockerRunner.MAX_CONTAINER_AGE },
      'starting container expiry'
    )
    // randomise the start time
    const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
    return setTimeout(
      () =>
        setInterval(
          () => DockerRunner.destroyOldContainers(),
          (oneHour = 60 * 60 * 1000)
        ),
      randomDelay
    )
  }
}
DockerRunner.startContainerMonitor()
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
function __guardMethod__(obj, methodName, transform) {
  if (
    typeof obj !== 'undefined' &&
    obj !== null &&
    typeof obj[methodName] === 'function'
  ) {
    return transform(obj, methodName)
  } else {
    return undefined
  }
}
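// Behaviour sketch of the decaffeinate guard helpers (assumed inputs):
//   __guard__({ a: 1 }, x => x.a)                      // => 1
//   __guard__(null, x => x.a)                          // => undefined
//   __guardMethod__('abc', 'match', o => o.match(/b/)) // => ['b', ...]
//   __guardMethod__(null, 'match', o => o.match(/b/))  // => undefined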
@@ -11,34 +11,47 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')
module.exports = DraftModeManager = {
  injectDraftMode(filename, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.readFile(filename, 'utf8', function(error, content) {
      if (error != null) {
        return callback(error)
      }
      // avoid adding draft mode more than once
      if (
        (content != null
          ? content.indexOf('\\documentclass[draft')
          : undefined) >= 0
      ) {
        return callback()
      }
      const modified_content = DraftModeManager._injectDraftOption(content)
      logger.log(
        {
          content: content.slice(0, 1024), // \documentclass is normally very near the top
          modified_content: modified_content.slice(0, 1024),
          filename
        },
        'injected draft class'
      )
      return fs.writeFile(filename, modified_content, callback)
    })
  },
  _injectDraftOption(content) {
    return (
      content
        // With existing options (must be first, otherwise both are applied)
        .replace(/\\documentclass\[/g, '\\documentclass[draft,')
        // Without existing options
        .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
    )
  }
}
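// Input/output sketch for _injectDraftOption (assumed inputs):
//   '\documentclass[a4paper]{article}' => '\documentclass[draft,a4paper]{article}'
//   '\documentclass{article}'          => '\documentclass[draft]{article}'
// Running the brace-form replace first would let the options-form rule fire
// again on its own output, producing '[draft,draft,...]', hence the ordering
// note above.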
@@ -4,33 +4,33 @@
 */
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function(message) {
  const error = new Error(message)
  error.name = 'NotFoundError'
  error.__proto__ = NotFoundError.prototype
  return error
}
NotFoundError.prototype.__proto__ = Error.prototype
var FilesOutOfSyncError = function(message) {
  const error = new Error(message)
  error.name = 'FilesOutOfSyncError'
  error.__proto__ = FilesOutOfSyncError.prototype
  return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
var AlreadyCompilingError = function(message) {
  const error = new Error(message)
  error.name = 'AlreadyCompilingError'
  error.__proto__ = AlreadyCompilingError.prototype
  return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype
module.exports = Errors = {
  NotFoundError,
  FilesOutOfSyncError,
  AlreadyCompilingError
}
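// Sketch (assumed inputs): the __proto__ assignments keep instanceof intact
// for these factory-style errors:
//   const e = new NotFoundError('no such output file')
//   e instanceof NotFoundError // => true
//   e instanceof Error         // => true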
@@ -13,119 +13,192 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')
const ProcessTable = {} // table of currently running jobs (pids or docker container names)
module.exports = LatexRunner = {
  runLatex(project_id, options, callback) {
    let command
    if (callback == null) {
      callback = function(error) {}
    }
    let {
      directory,
      mainFile,
      compiler,
      timeout,
      image,
      environment,
      flags
    } = options
    if (!compiler) {
      compiler = 'pdflatex'
    }
    if (!timeout) {
      timeout = 60000
    } // milliseconds
    logger.log(
      { directory, compiler, timeout, mainFile, environment, flags },
      'starting compile'
    )
    // We want to run latexmk on the tex file which we will automatically
    // generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')
    if (compiler === 'pdflatex') {
      command = LatexRunner._pdflatexCommand(mainFile, flags)
    } else if (compiler === 'latex') {
      command = LatexRunner._latexCommand(mainFile, flags)
    } else if (compiler === 'xelatex') {
      command = LatexRunner._xelatexCommand(mainFile, flags)
    } else if (compiler === 'lualatex') {
      command = LatexRunner._lualatexCommand(mainFile, flags)
    } else {
      return callback(new Error(`unknown compiler: ${compiler}`))
    }
    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
      command = ['strace', '-o', 'strace', '-ff'].concat(command)
    }
    const id = `${project_id}` // record running project under this id
    return (ProcessTable[id] = CommandRunner.run(
      project_id,
      command,
      directory,
      image,
      timeout,
      environment,
      function(error, output) {
        delete ProcessTable[id]
        if (error != null) {
          return callback(error)
        }
        const runs =
          __guard__(
            __guard__(output != null ? output.stderr : undefined, x1 =>
              x1.match(/^Run number \d+ of .*latex/gm)
            ),
            x => x.length
          ) || 0
        const failed =
          __guard__(output != null ? output.stdout : undefined, x2 =>
            x2.match(/^Latexmk: Errors/m)
          ) != null
            ? 1
            : 0
        // counters from latexmk output
        const stats = {}
        stats['latexmk-errors'] = failed
        stats['latex-runs'] = runs
        stats['latex-runs-with-errors'] = failed ? runs : 0
        stats[`latex-runs-${runs}`] = 1
        stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
        // timing information from /usr/bin/time
        const timings = {}
        const stderr = output != null ? output.stderr : undefined
        timings['cpu-percent'] =
          __guard__(
            stderr != null
              ? stderr.match(/Percent of CPU this job got: (\d+)/m)
              : undefined,
            x3 => x3[1]
          ) || 0
        timings['cpu-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/User time.*: (\d+.\d+)/m)
              : undefined,
            x4 => x4[1]
          ) || 0
        timings['sys-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/System time.*: (\d+.\d+)/m)
              : undefined,
            x5 => x5[1]
          ) || 0
        return callback(error, output, stats, timings)
      }
    ))
  },
  killLatex(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const id = `${project_id}`
    logger.log({ id }, 'killing running compile')
    if (ProcessTable[id] == null) {
      logger.warn({ id }, 'no such project to kill')
      return callback(null)
    } else {
      return CommandRunner.kill(ProcessTable[id], callback)
    }
  },
  _latexmkBaseCommand(flags) {
    let args = [
      'latexmk',
      '-cd',
      '-f',
      '-jobname=output',
      '-auxdir=$COMPILE_DIR',
      '-outdir=$COMPILE_DIR',
      '-synctex=1',
      '-interaction=batchmode'
    ]
    if (flags) {
      args = args.concat(flags)
    }
    return (
      __guard__(
        Settings != null ? Settings.clsi : undefined,
        x => x.latexmkCommandPrefix
      ) || []
    ).concat(args)
  },
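  // Assembled-command sketch for _pdflatexCommand('main.tex', undefined),
  // assuming no latexmkCommandPrefix is configured:
  //   ['latexmk', '-cd', '-f', '-jobname=output', '-auxdir=$COMPILE_DIR',
  //    '-outdir=$COMPILE_DIR', '-synctex=1', '-interaction=batchmode',
  //    '-pdf', '$COMPILE_DIR/main.tex']
  // The $COMPILE_DIR placeholder is substituted by the selected CommandRunner.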
  _pdflatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdf',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },
  _latexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdfdvi',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },
  _xelatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-xelatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },
  _lualatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-lualatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  }
}
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
@@ -13,62 +13,79 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CommandRunner
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
logger.info('using standard command runner')
module.exports = CommandRunner = {
  run(project_id, command, directory, image, timeout, environment, callback) {
    let key, value
    if (callback == null) {
      callback = function(error) {}
    }
    command = Array.from(command).map(arg =>
      arg.toString().replace('$COMPILE_DIR', directory)
    )
    logger.log({ project_id, command, directory }, 'running command')
    logger.warn('timeouts and sandboxing are not enabled with CommandRunner')
    // merge environment settings
    const env = {}
    for (key in process.env) {
      value = process.env[key]
      env[key] = value
    }
    for (key in environment) {
      value = environment[key]
      env[key] = value
    }
    // run command as detached process so it has its own process group (which can be killed if needed)
    const proc = spawn(command[0], command.slice(1), { cwd: directory, env })
    let stdout = ''
    proc.stdout.on('data', data => (stdout += data))
    proc.on('error', function(err) {
      logger.err(
        { err, project_id, command, directory },
        'error running command'
      )
      return callback(err)
    })
    proc.on('close', function(code, signal) {
      let err
      logger.info({ code, signal, project_id }, 'command exited')
      if (signal === 'SIGTERM') {
        // signal from kill method below
        err = new Error('terminated')
        err.terminated = true
        return callback(err)
      } else if (code === 1) {
        // exit status from chktex
        err = new Error('exited')
        err.code = code
        return callback(err)
      } else {
        return callback(null, { stdout: stdout })
      }
    })
    return proc.pid
  }, // return process id to allow job to be killed if necessary
  kill(pid, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    try {
      process.kill(-pid) // kill all processes in group
    } catch (err) {
      return callback(err)
    }
    return callback()
  }
}
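// Note on kill: process.kill(-pid) signals the process group whose id is
// `pid`; this assumes the spawned command leads its own group, per the
// process-group comment in `run` above. Any failure (e.g. the group is
// already gone) surfaces through the callback via the try/catch.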
@@ -11,46 +11,62 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')
module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 1000, // 1s between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000, // 15s maximum time to spend trying to get the lock
  LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires
  runWithLock(path, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const lockOpts = {
      wait: this.MAX_LOCK_WAIT_TIME,
      pollPeriod: this.LOCK_TEST_INTERVAL,
      stale: this.LOCK_STALE
    }
    return Lockfile.lock(path, lockOpts, function(error) {
      if ((error != null ? error.code : undefined) === 'EEXIST') {
        return callback(new Errors.AlreadyCompilingError('compile in progress'))
      } else if (error != null) {
        return fs.lstat(path, (statLockErr, statLock) =>
          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
            fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
              logger.err(
                {
                  error,
                  path,
                  statLock,
                  statLockErr,
                  statDir,
                  statDirErr,
                  readdirErr,
                  readdirDir
                },
                'unable to get lock'
              )
              return callback(error)
            })
          )
        )
      } else {
        return runner((error1, ...args) =>
          Lockfile.unlock(path, function(error2) {
            error = error1 || error2
            if (error != null) {
              return callback(error)
            }
            return callback(null, ...Array.from(args))
          })
        )
      }
    })
  }
}
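// Usage sketch with a hypothetical lock path; wait, pollPeriod and stale are
// the documented npm lockfile options that lockOpts maps onto:
//   LockManager.runWithLock('/compiles/<project-id>/.lock', releaseLock => {
//     // critical section, e.g. a single compile
//     releaseLock()
//   }, err => {
//     // err is an AlreadyCompilingError when the lock is already held
//   })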
@@ -1,4 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')
@@ -13,19 +13,19 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputCacheManager
const async = require('async')
const fs = require('fs')
const fse = require('fs-extra')
const Path = require('path')
const logger = require('logger-sharelatex')
const _ = require('underscore')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const OutputFileOptimiser = require('./OutputFileOptimiser')
module.exports = OutputCacheManager = {
  CACHE_SUBDIR: '.cache/clsi',
  ARCHIVE_SUBDIR: '.archive/clsi',
  // build id is HEXDATE-HEXRANDOM from Date.now() and RandomBytes
@@ -37,239 +37,363 @@ module.exports = (OutputCacheManager = {
  path(buildId, file) {
    // used by static server, given build id return '.cache/clsi/buildId'
    if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
      return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
    } else {
      // for invalid build id, return top level
      return file
    }
  },
  generateBuildId(callback) {
    // generate a secure build id from Date.now() and 8 random bytes in hex
    if (callback == null) {
      callback = function(error, buildId) {}
    }
    return crypto.randomBytes(8, function(err, buf) {
      if (err != null) {
        return callback(err)
      }
      const random = buf.toString('hex')
      const date = Date.now().toString(16)
      return callback(err, `${date}-${random}`)
    })
  },
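  // Example build id (hypothetical): '17058a9b2c3-4f6a8c0d9e2b1a37', i.e.
  // Date.now() in hex, a dash, then the 8 random bytes as 16 hex characters.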
  saveOutputFiles(outputFiles, compileDir, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return OutputCacheManager.generateBuildId(function(err, buildId) {
      if (err != null) {
        return callback(err)
      }
      return OutputCacheManager.saveOutputFilesInBuildDir(
        outputFiles,
        compileDir,
        buildId,
        callback
      )
    })
  },
  saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
    // make a compileDir/CACHE_SUBDIR/build_id directory and
    // copy all the output files into it
    if (callback == null) {
      callback = function(error) {}
    }
    const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
    // Put the files into a new cache subdirectory
    const cacheDir = Path.join(
      compileDir,
      OutputCacheManager.CACHE_SUBDIR,
      buildId
    )
    // Is it a per-user compile? check if compile directory is PROJECTID-USERID
    const perUser = Path.basename(compileDir).match(
      /^[0-9a-f]{24}-[0-9a-f]{24}$/
    )
    // Archive logs in background
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
      (Settings.clsi != null ? Settings.clsi.strace : undefined)
    ) {
      OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
        err
      ) {
        if (err != null) {
          return logger.warn({ err }, 'error archiving log files')
        }
      })
    }
    // make the new cache directory
    return fse.ensureDir(cacheDir, function(err) {
      if (err != null) {
        logger.error(
          { err, directory: cacheDir },
          'error creating cache directory'
        )
        return callback(err, outputFiles)
      } else {
        // copy all the output files into the new cache directory
        const results = []
        return async.mapSeries(
          outputFiles,
          function(file, cb) {
            // don't send dot files as output, express doesn't serve them
            if (OutputCacheManager._fileIsHidden(file.path)) {
              logger.debug(
                { compileDir, path: file.path },
                'ignoring dotfile in output'
              )
              return cb()
            }
            // copy other files into cache directory if valid
            const newFile = _.clone(file)
            const [src, dst] = Array.from([
              Path.join(compileDir, file.path),
              Path.join(cacheDir, file.path)
            ])
            return OutputCacheManager._checkFileIsSafe(src, function(
              err,
              isSafe
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!isSafe) {
                return cb()
              }
              return OutputCacheManager._checkIfShouldCopy(src, function(
                err,
                shouldCopy
              ) {
                if (err != null) {
                  return cb(err)
                }
                if (!shouldCopy) {
                  return cb()
                }
                return OutputCacheManager._copyFile(src, dst, function(err) {
                  if (err != null) {
                    return cb(err)
                  }
                  newFile.build = buildId // attach a build id if we cached the file
                  results.push(newFile)
                  return cb()
                })
              })
            })
          },
          function(err) {
            if (err != null) {
              // pass back the original files if we encountered *any* error
              callback(err, outputFiles)
              // clean up the directory we just created
              return fse.remove(cacheDir, function(err) {
                if (err != null) {
                  return logger.error(
                    { err, dir: cacheDir },
                    'error removing cache dir after failure'
                  )
                }
              })
            } else {
              // pass back the list of new files in the cache
              callback(err, results)
              // let file expiry run in the background, expire all previous files if per-user
              return OutputCacheManager.expireOutputFiles(cacheRoot, {
                keep: buildId,
                limit: perUser ? 1 : null
              })
            }
          }
        )
      }
    })
  },
  archiveLogs(outputFiles, compileDir, buildId, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const archiveDir = Path.join(
      compileDir,
      OutputCacheManager.ARCHIVE_SUBDIR,
      buildId
    )
    logger.log({ dir: archiveDir }, 'archiving log files for project')
    return fse.ensureDir(archiveDir, function(err) {
      if (err != null) {
        return callback(err)
      }
      return async.mapSeries(
        outputFiles,
        function(file, cb) {
          const [src, dst] = Array.from([
            Path.join(compileDir, file.path),
            Path.join(archiveDir, file.path)
          ])
          return OutputCacheManager._checkFileIsSafe(src, function(
            err,
            isSafe
          ) {
            if (err != null) {
              return cb(err)
            }
            if (!isSafe) {
              return cb()
            }
            return OutputCacheManager._checkIfShouldArchive(src, function(
              err,
              shouldArchive
            ) {
              if (err != null) {
                return cb(err)
              }
              if (!shouldArchive) {
                return cb()
              }
              return OutputCacheManager._copyFile(src, dst, cb)
            })
          })
        },
        callback
      )
    })
  },
  expireOutputFiles(cacheRoot, options, callback) {
    // look in compileDir for build dirs and delete if > N or age of mod time > T
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.readdir(cacheRoot, function(err, results) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return callback(null)
        } // cache directory is empty
        logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
        return callback(err)
      }

      const dirs = results.sort().reverse()
      const currentTime = Date.now()

      const isExpired = function(dir, index) {
        if ((options != null ? options.keep : undefined) === dir) {
          return false
        }
        // remove any directories over the requested (non-null) limit
        if (
          (options != null ? options.limit : undefined) != null &&
          index > options.limit
        ) {
          return true
        }
        // remove any directories over the hard limit
        if (index > OutputCacheManager.CACHE_LIMIT) {
          return true
        }
        // we can get the build time from the first part of the directory name DDDD-RRRR
        // DDDD is date and RRRR is random bytes
        const dirTime = parseInt(
          __guard__(dir.split('-'), x => x[0]),
          16
        )
        const age = currentTime - dirTime
        return age > OutputCacheManager.CACHE_AGE
      }

      const toRemove = _.filter(dirs, isExpired)

      const removeDir = (dir, cb) =>
        fse.remove(Path.join(cacheRoot, dir), function(err, result) {
          logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
          if (err != null) {
            logger.error({ err, dir }, 'cache remove error')
          }
          return cb(err, result)
        })

      return async.eachSeries(
        toRemove,
        (dir, cb) => removeDir(dir, cb),
        callback
      )
    })
  },
  _fileIsHidden(path) {
    return (path != null ? path.match(/^\.|\/\./) : undefined) != null
  },
  _checkFileIsSafe(src, callback) {
    // check if we have a valid file to copy into the cache
    if (callback == null) {
      callback = function(error, isSafe) {}
    }
    return fs.stat(src, function(err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared before copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        // some other problem reading the file
        logger.error({ err, file: src }, 'stat error for file in cache')
        return callback(err, false)
      } else if (!stats.isFile()) {
        // other filetype - reject it
        logger.warn(
          { src, stat: stats },
          'nonfile output - refusing to copy to cache'
        )
        return callback(null, false)
      } else {
        // it's a plain file, ok to copy
        return callback(null, true)
      }
    })
  },
  _copyFile(src, dst, callback) {
    // copy output file into the cache
    return fse.copy(src, dst, function(err) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared when copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        logger.error({ err, src, dst }, 'copy error for file in cache')
        return callback(err)
      } else {
        if (
          Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
        ) {
          // don't run any optimisations on the pdf when they are done
          // in the docker container
          return callback()
        } else {
          // call the optimiser for the file too
          return OutputFileOptimiser.optimiseFile(src, dst, callback)
        }
      }
    })
  },
  _checkIfShouldCopy(src, callback) {
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    return callback(null, !Path.basename(src).match(/^strace/))
  },
  _checkIfShouldArchive(src, callback) {
    let needle
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    if (Path.basename(src).match(/^strace/)) {
      return callback(null, true)
    }
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
      ((needle = Path.basename(src)),
      ['output.log', 'output.blg'].includes(needle))
    ) {
      return callback(null, true)
    }
    return callback(null, false)
  }
}
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
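A note on the naming scheme the expiry check above depends on: each build directory is named DDDD-RRRR, where DDDD is the build time in hexadecimal milliseconds, so the age test can be reproduced in isolation. A minimal sketch, with the directory name and age limit invented for illustration:

// Sketch: how isExpired recovers a build's age from its directory name.
// The directory name and CACHE_AGE value below are illustrative only.
const CACHE_AGE = 60 * 60 * 1000 // pretend the cache keeps builds for an hour

const dir = '1705a0f3d2c-a1b2c3d4e5f60708' // DDDD-RRRR: hex timestamp, random bytes
const dirTime = parseInt(dir.split('-')[0], 16) // build time in ms since the epoch
const age = Date.now() - dirTime
console.log(`build is ${age}ms old; expired: ${age > CACHE_AGE}`)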
@@ -14,73 +14,102 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

module.exports = OutputFileFinder = {
  findOutputFiles(resources, directory, callback) {
    if (callback == null) {
      callback = function(error, outputFiles, allFiles) {}
    }
    const incomingResources = {}
    for (const resource of Array.from(resources)) {
      incomingResources[resource.path] = true
    }

    return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
      if (allFiles == null) {
        allFiles = []
      }
      if (error != null) {
        logger.err({ err: error }, 'error finding all output files')
        return callback(error)
      }
      const outputFiles = []
      for (const file of Array.from(allFiles)) {
        if (!incomingResources[file]) {
          outputFiles.push({
            path: file,
            type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
          })
        }
      }
      return callback(null, outputFiles, allFiles)
    })
  },

  _getAllFiles(directory, _callback) {
    if (_callback == null) {
      _callback = function(error, fileList) {}
    }
    const callback = function(error, fileList) {
      _callback(error, fileList)
      return (_callback = function() {})
    }

    // don't include clsi-specific files/directories in the output list
    const EXCLUDE_DIRS = [
      '-name',
      '.cache',
      '-o',
      '-name',
      '.archive',
      '-o',
      '-name',
      '.project-*'
    ]
    const args = [
      directory,
      '(',
      ...Array.from(EXCLUDE_DIRS),
      ')',
      '-prune',
      '-o',
      '-type',
      'f',
      '-print'
    ]
    logger.log({ args }, 'running find command')

    const proc = spawn('find', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    proc.on('error', callback)
    return proc.on('close', function(code) {
      if (code !== 0) {
        logger.warn(
          { directory, code },
          "find returned error, directory likely doesn't exist"
        )
        return callback(null, [])
      }
      let fileList = stdout.trim().split('\n')
      fileList = fileList.map(function(file) {
        // Strip leading directory
        let path
        return (path = Path.relative(directory, file))
      })
      return callback(null, fileList)
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
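For reference, the _getAllFiles helper above shells out to find(1). Printing the argument list it builds makes the pruning behaviour concrete; the compile directory here is hypothetical:

// Sketch: the find(1) invocation spawned by _getAllFiles (directory invented).
const directory = '/compile/some-project'
const EXCLUDE_DIRS = ['-name', '.cache', '-o', '-name', '.archive', '-o', '-name', '.project-*']
const args = [directory, '(', ...EXCLUDE_DIRS, ')', '-prune', '-o', '-type', 'f', '-print']
console.log(['find', ...args].join(' '))
// find /compile/some-project ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print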
@@ -13,74 +13,92 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('underscore')

module.exports = OutputFileOptimiser = {
  optimiseFile(src, dst, callback) {
    // check output file (src) and see if we can optimise it, storing
    // the result in the build directory (dst)
    if (callback == null) {
      callback = function(error) {}
    }
    if (src.match(/\/output\.pdf$/)) {
      return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
        err,
        isOptimised
      ) {
        if (err != null || isOptimised) {
          return callback(null)
        }
        return OutputFileOptimiser.optimisePDF(src, dst, callback)
      })
    } else {
      return callback(null)
    }
  },

  checkIfPDFIsOptimised(file, callback) {
    const SIZE = 16 * 1024 // check the header of the pdf
    const result = new Buffer(SIZE)
    result.fill(0) // prevent leakage of uninitialised buffer
    return fs.open(file, 'r', function(err, fd) {
      if (err != null) {
        return callback(err)
      }
      return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
        fs.close(fd, function(errClose) {
          if (errRead != null) {
            return callback(errRead)
          }
          if (errClose != null) {
            return callback(errClose)
          }
          const isOptimised =
            buffer.toString('ascii').indexOf('/Linearized 1') >= 0
          return callback(null, isOptimised)
        })
      )
    })
  },

  optimisePDF(src, dst, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const tmpOutput = dst + '.opt'
    const args = ['--linearize', src, tmpOutput]
    logger.log({ args }, 'running qpdf command')

    const timer = new Metrics.Timer('qpdf')
    const proc = spawn('qpdf', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    callback = _.once(callback) // avoid double call back for error and close event
    proc.on('error', function(err) {
      logger.warn({ err, args }, 'qpdf failed')
      return callback(null)
    }) // ignore the error
    return proc.on('close', function(code) {
      timer.done()
      if (code !== 0) {
        logger.warn({ code, args }, 'qpdf returned error')
        return callback(null) // ignore the error
      }
      return fs.rename(tmpOutput, dst, function(err) {
        if (err != null) {
          logger.warn(
            { tmpOutput, dst },
            'failed to rename output of qpdf command'
          )
        }
        return callback(null)
      })
    })
  } // ignore the error
}
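The optimisation test above keys off qpdf's linearization marker: a web-optimised PDF carries /Linearized 1 in its first 16kB. A usage sketch, assuming the module's own filename and an invented output path:

// Sketch: probing a compile output before deciding whether to run qpdf.
const OutputFileOptimiser = require('./OutputFileOptimiser')

OutputFileOptimiser.checkIfPDFIsOptimised('/compile/abc/output.pdf', function(
  err,
  isOptimised
) {
  if (err != null) {
    return console.error(err)
  }
  // when false, optimisePDF would spawn `qpdf --linearize src dst.opt`
  console.log({ isOptimised })
})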
@@ -11,113 +11,153 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')

module.exports = ProjectPersistenceManager = {
  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

  markProjectAsJustAccessed(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.Project.findOrCreate({ where: { project_id } })
        .spread((project, created) =>
          project
            .updateAttributes({ lastAccessed: new Date() })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  clearExpiredProjects(callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ProjectPersistenceManager._findExpiredProjectIds(function(
      error,
      project_ids
    ) {
      if (error != null) {
        return callback(error)
      }
      logger.log({ project_ids }, 'clearing expired projects')
      const jobs = Array.from(project_ids || []).map(project_id =>
        (project_id => callback =>
          ProjectPersistenceManager.clearProjectFromCache(project_id, function(
            err
          ) {
            if (err != null) {
              logger.error({ err, project_id }, 'error clearing project')
            }
            return callback()
          }))(project_id)
      )
      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        return CompileManager.clearExpiredProjects(
          ProjectPersistenceManager.EXPIRY_TIMEOUT,
          error => callback()
        )
      })
    })
  }, // ignore any errors from deleting directories

  clearProject(project_id, user_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id, user_id }, 'clearing project for user')
    return CompileManager.clearProject(project_id, user_id, function(error) {
      if (error != null) {
        return callback(error)
      }
      return ProjectPersistenceManager.clearProjectFromCache(
        project_id,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return callback()
        }
      )
    })
  },

  clearProjectFromCache(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from cache')
    return UrlCache.clearProject(project_id, function(error) {
      if (error != null) {
        logger.err({ error, project_id }, 'error clearing project from cache')
        return callback(error)
      }
      return ProjectPersistenceManager._clearProjectFromDatabase(
        project_id,
        function(error) {
          if (error != null) {
            logger.err(
              { error, project_id },
              'error clearing project from database'
            )
          }
          return callback(error)
        }
      )
    })
  },

  _clearProjectFromDatabase(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from database')
    const job = cb =>
      db.Project.destroy({ where: { project_id } })
        .then(() => cb())
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findExpiredProjectIds(callback) {
    if (callback == null) {
      callback = function(error, project_ids) {}
    }
    const job = function(cb) {
      const keepProjectsFrom = new Date(
        Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
      )
      const q = {}
      q[db.op.lt] = keepProjectsFrom
      return db.Project.findAll({ where: { lastAccessed: q } })
        .then(projects =>
          cb(
            null,
            projects.map(project => project.project_id)
          )
        )
        .error(cb)
    }
    return dbQueue.queue.push(job, callback)
  }
}

logger.log(
  { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
  'project assets kept timeout'
)
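_findExpiredProjectIds above keeps any project touched within EXPIRY_TIMEOUT and clears the rest. A small sketch of the cut-off computation, shown with the default window (settings.project_cache_length_ms can override it):

// Sketch: the expiry cut-off used when clearing idle projects.
const oneDay = 24 * 60 * 60 * 1000
const EXPIRY_TIMEOUT = oneDay * 2.5 // default; overridden by project_cache_length_ms

const keepProjectsFrom = new Date(Date.now() - EXPIRY_TIMEOUT)
// any project whose lastAccessed is older than this is cleared from cache and db
console.log(`clearing projects untouched since ${keepProjectsFrom.toISOString()}`)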
@@ -17,67 +17,75 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RequestParser
const settings = require('settings-sharelatex')

module.exports = RequestParser = {
  VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
  MAX_TIMEOUT: 600,

  parse(body, callback) {
    let resource
    if (callback == null) {
      callback = function(error, data) {}
    }
    const response = {}

    if (body.compile == null) {
      return callback('top level object should have a compile attribute')
    }

    const { compile } = body
    if (!compile.options) {
      compile.options = {}
    }

    try {
      response.compiler = this._parseAttribute(
        'compiler',
        compile.options.compiler,
        {
          validValues: this.VALID_COMPILERS,
          default: 'pdflatex',
          type: 'string'
        }
      )
      response.timeout = this._parseAttribute(
        'timeout',
        compile.options.timeout,
        {
          default: RequestParser.MAX_TIMEOUT,
          type: 'number'
        }
      )
      response.imageName = this._parseAttribute(
        'imageName',
        compile.options.imageName,
        { type: 'string' }
      )
      response.draft = this._parseAttribute('draft', compile.options.draft, {
        default: false,
        type: 'boolean'
      })
      response.check = this._parseAttribute('check', compile.options.check, {
        type: 'string'
      })
      response.flags = this._parseAttribute('flags', compile.options.flags, {
        default: [],
        type: 'object'
      })

      // The syncType specifies whether the request contains all
      // resources (full) or only those resources to be updated
      // in-place (incremental).
      response.syncType = this._parseAttribute(
        'syncType',
        compile.options.syncType,
        {
          validValues: ['full', 'incremental'],
          type: 'string'
        }
      )

      // The syncState is an identifier passed in with the request
      // which has the property that it changes when any resource is
@@ -88,66 +96,75 @@ module.exports = (RequestParser = {
      //
      // on syncType incremental the syncState identifier must match
      // the stored value
      response.syncState = this._parseAttribute(
        'syncState',
        compile.options.syncState,
        { type: 'string' }
      )

      if (response.timeout > RequestParser.MAX_TIMEOUT) {
        response.timeout = RequestParser.MAX_TIMEOUT
      }
      response.timeout = response.timeout * 1000 // milliseconds

      response.resources = (() => {
        const result = []
        for (resource of Array.from(compile.resources || [])) {
          result.push(this._parseResource(resource))
        }
        return result
      })()

      const rootResourcePath = this._parseAttribute(
        'rootResourcePath',
        compile.rootResourcePath,
        {
          default: 'main.tex',
          type: 'string'
        }
      )
      const originalRootResourcePath = rootResourcePath
      const sanitizedRootResourcePath = RequestParser._sanitizePath(
        rootResourcePath
      )
      response.rootResourcePath = RequestParser._checkPath(
        sanitizedRootResourcePath
      )

      for (resource of Array.from(response.resources)) {
        if (resource.path === originalRootResourcePath) {
          resource.path = sanitizedRootResourcePath
        }
      }
    } catch (error1) {
      const error = error1
      return callback(error)
    }

    return callback(null, response)
  },

  _parseResource(resource) {
    let modified
    if (resource.path == null || typeof resource.path !== 'string') {
      throw 'all resources should have a path attribute'
    }

    if (resource.modified != null) {
      modified = new Date(resource.modified)
      if (isNaN(modified.getTime())) {
        throw `resource modified date could not be understood: ${resource.modified}`
      }
    }

    if (resource.url == null && resource.content == null) {
      throw 'all resources should have either a url or content attribute'
    }
    if (resource.content != null && typeof resource.content !== 'string') {
      throw 'content attribute should be a string'
    }
    if (resource.url != null && typeof resource.url !== 'string') {
      throw 'url attribute should be a string'
    }

    return {
@@ -155,39 +172,46 @@ module.exports = (RequestParser = {
      modified,
      url: resource.url,
      content: resource.content
    }
  },

  _parseAttribute(name, attribute, options) {
    if (attribute != null) {
      if (options.validValues != null) {
        if (options.validValues.indexOf(attribute) === -1) {
          throw `${name} attribute should be one of: ${options.validValues.join(
            ', '
          )}`
        }
      }
      if (options.type != null) {
        if (typeof attribute !== options.type) {
          throw `${name} attribute should be a ${options.type}`
        }
      }
    } else {
      if (options.default != null) {
        return options.default
      }
    }
    return attribute
  },

  _sanitizePath(path) {
    // See http://php.net/manual/en/function.escapeshellcmd.php
    return path.replace(
      /[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
      ''
    )
  },

  _checkPath(path) {
    // check that the request does not use a relative path
    for (const dir of Array.from(path.split('/'))) {
      if (dir === '..') {
        throw 'relative path in root resource'
      }
    }
    return path
  }
}
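To make the shape RequestParser.parse accepts concrete, here is a minimal request body that passes validation; the content, URL and timestamp are invented for illustration:

// Sketch: a request body accepted by RequestParser.parse (values illustrative).
const body = {
  compile: {
    options: {
      compiler: 'pdflatex', // must be one of VALID_COMPILERS
      timeout: 60, // seconds; capped at MAX_TIMEOUT, then converted to ms
      syncType: 'full' // 'incremental' requires a matching syncState
    },
    rootResourcePath: 'main.tex',
    resources: [
      { path: 'main.tex', content: '\\documentclass{article}...' },
      {
        path: 'images/logo.png',
        url: 'http://filestore.example/project/123/file/456', // hypothetical
        modified: 1582105677000
      }
    ]
  }
}

require('./RequestParser').parse(body, (error, request) =>
  console.log(error, request)
)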
@@ -13,16 +13,15 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')

module.exports = ResourceStateManager = {
  // The sync state is an identifier which must match for an
  // incremental update to be allowed.
  //
@@ -37,78 +36,119 @@ module.exports = (ResourceStateManager = {
  // content. The sync state identifier must change if any docs or
  // files are moved, added, deleted or renamed.

  SYNC_STATE_FILE: '.project-sync-state',
  SYNC_STATE_MAX_SIZE: 128 * 1024,

  saveProjectState(state, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    if (state == null) {
      // remove the file if no state passed in
      logger.log({ state, basePath }, 'clearing sync state')
      return fs.unlink(stateFile, function(err) {
        if (err != null && err.code !== 'ENOENT') {
          return callback(err)
        } else {
          return callback()
        }
      })
    } else {
      logger.log({ state, basePath }, 'writing sync state')
      const resourceList = Array.from(resources).map(resource => resource.path)
      return fs.writeFile(
        stateFile,
        [...Array.from(resourceList), `stateHash:${state}`].join('\n'),
        callback
      )
    }
  },

  checkProjectStateMatches(state, basePath, callback) {
    if (callback == null) {
      callback = function(error, resources) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    const size = this.SYNC_STATE_MAX_SIZE
    return SafeReader.readFile(stateFile, size, 'utf8', function(
      err,
      result,
      bytesRead
    ) {
      if (err != null) {
        return callback(err)
      }
      if (bytesRead === size) {
        logger.error(
          { file: stateFile, size, bytesRead },
          'project state file truncated'
        )
      }
      const array =
        __guard__(result != null ? result.toString() : undefined, x =>
          x.split('\n')
        ) || []
      const adjustedLength = Math.max(array.length, 1)
      const resourceList = array.slice(0, adjustedLength - 1)
      const oldState = array[adjustedLength - 1]
      const newState = `stateHash:${state}`
      logger.log(
        { state, oldState, basePath, stateMatches: newState === oldState },
        'checking sync state'
      )
      if (newState !== oldState) {
        return callback(
          new Errors.FilesOutOfSyncError('invalid state for incremental update')
        )
      } else {
        const resources = Array.from(resourceList).map(path => ({ path }))
        return callback(null, resources)
      }
    })
  },

  checkResourceFiles(resources, allFiles, basePath, callback) {
    // check the paths are all relative to current directory
    let file
    if (callback == null) {
      callback = function(error) {}
    }
    for (file of Array.from(resources || [])) {
      for (const dir of Array.from(
        __guard__(file != null ? file.path : undefined, x => x.split('/'))
      )) {
        if (dir === '..') {
          return callback(new Error('relative path in resource file list'))
        }
      }
    }
    // check if any of the input files are not present in list of files
    const seenFile = {}
    for (file of Array.from(allFiles)) {
      seenFile[file] = true
    }
    const missingFiles = Array.from(resources)
      .filter(resource => !seenFile[resource.path])
      .map(resource => resource.path)
    if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
      logger.err(
        { missingFiles, basePath, allFiles, resources },
        'missing input files for project'
      )
      return callback(
        new Errors.FilesOutOfSyncError(
          'resource files missing in incremental update'
        )
      )
    } else {
      return callback()
    }
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
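Concretely, saveProjectState above writes one resource path per line followed by a stateHash line, so a .project-sync-state file looks like the output of this sketch (hash invented):

// Sketch: the on-disk format produced by saveProjectState.
const resources = [{ path: 'main.tex' }, { path: 'chapters/one.tex' }]
const state = '1a2b3c4d'
console.log(
  [...resources.map(resource => resource.path), `stateHash:${state}`].join('\n')
)
// main.tex
// chapters/one.tex
// stateHash:1a2b3c4d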
@@ -14,202 +14,339 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const mkdirp = require('mkdirp')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')

const parallelFileDownloads = settings.parallelFileDownloads || 1

module.exports = ResourceWriter = {
  syncResourcesToDisk(request, basePath, callback) {
    if (callback == null) {
      callback = function(error, resourceList) {}
    }
    if (request.syncType === 'incremental') {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'incremental sync'
      )
      return ResourceStateManager.checkProjectStateMatches(
        request.syncState,
        basePath,
        function(error, resourceList) {
          if (error != null) {
            return callback(error)
          }
          return ResourceWriter._removeExtraneousFiles(
            resourceList,
            basePath,
            function(error, outputFiles, allFiles) {
              if (error != null) {
                return callback(error)
              }
              return ResourceStateManager.checkResourceFiles(
                resourceList,
                allFiles,
                basePath,
                function(error) {
                  if (error != null) {
                    return callback(error)
                  }
                  return ResourceWriter.saveIncrementalResourcesToDisk(
                    request.project_id,
                    request.resources,
                    basePath,
                    function(error) {
                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null, resourceList)
                    }
                  )
                }
              )
            }
          )
        }
      )
    } else {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'full sync'
      )
      return this.saveAllResourcesToDisk(
        request.project_id,
        request.resources,
        basePath,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return ResourceStateManager.saveProjectState(
            request.syncState,
            request.resources,
            basePath,
            function(error) {
              if (error != null) {
                return callback(error)
              }
              return callback(null, request.resources)
            }
          )
        }
      )
    }
  },

  saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(resources).map(resource =>
        (resource => {
          return callback =>
            this._writeResourceToDisk(project_id, resource, basePath, callback)
        })(resource)
      )
      return async.parallelLimit(jobs, parallelFileDownloads, callback)
    })
  },

  saveAllResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      return this._removeExtraneousFiles(resources, basePath, error => {
        if (error != null) {
          return callback(error)
        }
        const jobs = Array.from(resources).map(resource =>
          (resource => {
            return callback =>
              this._writeResourceToDisk(
                project_id,
                resource,
                basePath,
                callback
              )
          })(resource)
        )
        return async.parallelLimit(jobs, parallelFileDownloads, callback)
      })
    })
  },

  _createDirectory(basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.mkdir(basePath, function(err) {
      if (err != null) {
        if (err.code === 'EEXIST') {
          return callback()
        } else {
          logger.log({ err, dir: basePath }, 'error creating directory')
          return callback(err)
        }
      } else {
        return callback()
      }
    })
  },

  _removeExtraneousFiles(resources, basePath, _callback) {
    if (_callback == null) {
      _callback = function(error, outputFiles, allFiles) {}
    }
    const timer = new Metrics.Timer('unlink-output-files')
    const callback = function(error, ...result) {
      timer.done()
      return _callback(error, ...Array.from(result))
    }
    return OutputFileFinder.findOutputFiles(resources, basePath, function(
      error,
      outputFiles,
      allFiles
    ) {
      if (error != null) {
        return callback(error)
      }

      const jobs = []
      for (const file of Array.from(outputFiles || [])) {
        ;(function(file) {
          const { path } = file
          let should_delete = true
          if (
            path.match(/^output\./) ||
            path.match(/\.aux$/) ||
            path.match(/^cache\//)
          ) {
            // knitr cache
            should_delete = false
          }
          if (path.match(/^output-.*/)) {
            // Tikz cached figures (default case)
            should_delete = false
          }
          if (path.match(/\.(pdf|dpth|md5)$/)) {
            // Tikz cached figures (by extension)
            should_delete = false
          }
          if (
            path.match(/\.(pygtex|pygstyle)$/) ||
            path.match(/(^|\/)_minted-[^\/]+\//)
          ) {
            // minted files/directory
            should_delete = false
          }
          if (
            path.match(/\.md\.tex$/) ||
            path.match(/(^|\/)_markdown_[^\/]+\//)
          ) {
            // markdown files/directory
            should_delete = false
          }
          if (path.match(/-eps-converted-to\.pdf$/)) {
            // Epstopdf generated files
            should_delete = false
          }
          if (
            path === 'output.pdf' ||
            path === 'output.dvi' ||
            path === 'output.log' ||
            path === 'output.xdv'
          ) {
            should_delete = true
          }
          if (path === 'output.tex') {
            // created by TikzManager if present in output files
            should_delete = true
          }
          if (should_delete) {
            return jobs.push(callback =>
              ResourceWriter._deleteFileIfNotDirectory(
                Path.join(basePath, path),
                callback
              )
            )
          }
        })(file)
      }

      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null, outputFiles, allFiles)
      })
    })
  },

  _deleteFileIfNotDirectory(path, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.stat(path, function(error, stat) {
      if (error != null && error.code === 'ENOENT') {
        return callback()
      } else if (error != null) {
        logger.err(
          { err: error, path },
          'error stating file in deleteFileIfNotDirectory'
        )
        return callback(error)
      } else if (stat.isFile()) {
        return fs.unlink(path, function(error) {
          if (error != null) {
            logger.err(
              { err: error, path },
              'error removing file in deleteFileIfNotDirectory'
            )
            return callback(error)
          } else {
            return callback()
          }
        })
      } else {
        return callback()
      }
    })
  },

  _writeResourceToDisk(project_id, resource, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(basePath, resource.path, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return mkdirp(Path.dirname(path), function(error) {
        if (error != null) {
          return callback(error)
        }
        // TODO: Don't overwrite file if it hasn't been modified
        if (resource.url != null) {
          return UrlCache.downloadUrlToFile(
            project_id,
            resource.url,
            path,
            resource.modified,
            function(err) {
              if (err != null) {
                logger.err(
                  {
                    err,
                    project_id,
                    path,
                    resource_url: resource.url,
                    modified: resource.modified
                  },
                  'error downloading file for resources'
                )
              }
              return callback()
            }
          ) // try and continue compiling even if http resource can not be downloaded at this time
        } else {
          const process = require('process')
          fs.writeFile(path, resource.content, callback)
          try {
            let result
            return (result = fs.lstatSync(path))
          } catch (e) {}
        }
      })
    })
  },

  checkPath(basePath, resourcePath, callback) {
    const path = Path.normalize(Path.join(basePath, resourcePath))
    if (path.slice(0, basePath.length + 1) !== basePath + '/') {
      return callback(new Error('resource path is outside root directory'))
    } else {
      return callback(null, path)
    }
  }
}
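As a rough guide to the should_delete rules in _removeExtraneousFiles above: cache-like outputs (knitr, Tikz, minted, markdown, epstopdf) survive between compiles, while primary outputs are always removed so they can be regenerated. A condensed restatement for illustration only, not the module's own code:

// Sketch: a compact equivalent of the deletion rules (sample paths invented).
function shouldDelete(path) {
  const keep =
    /^output\.|\.aux$|^cache\//.test(path) || // knitr cache
    /^output-/.test(path) || // Tikz cached figures (default case)
    /\.(pdf|dpth|md5)$/.test(path) || // Tikz cached figures (by extension)
    /\.(pygtex|pygstyle)$/.test(path) ||
    /(^|\/)_minted-[^\/]+\//.test(path) || // minted files/directory
    /\.md\.tex$/.test(path) ||
    /(^|\/)_markdown_[^\/]+\//.test(path) || // markdown files/directory
    /-eps-converted-to\.pdf$/.test(path) // Epstopdf generated files
  const alwaysDelete = /^output\.(pdf|dvi|log|xdv|tex)$/.test(path)
  return alwaysDelete || !keep
}

for (const path of ['cache/objects/abc', 'figure0.dpth', 'output.pdf']) {
  console.log(path, shouldDelete(path) ? 'deleted' : 'kept')
}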
@@ -12,36 +12,49 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let SafeReader; let SafeReader
const fs = require("fs"); const fs = require('fs')
const logger = require("logger-sharelatex"); const logger = require('logger-sharelatex')
module.exports = (SafeReader = {
module.exports = SafeReader = {
// safely read up to size bytes from a file and return result as a // safely read up to size bytes from a file and return result as a
// string // string
readFile(file, size, encoding, callback) { readFile(file, size, encoding, callback) {
if (callback == null) { callback = function(error, result) {}; } if (callback == null) {
callback = function(error, result) {}
}
return fs.open(file, 'r', function(err, fd) { return fs.open(file, 'r', function(err, fd) {
if ((err != null) && (err.code === 'ENOENT')) { return callback(); } if (err != null && err.code === 'ENOENT') {
if (err != null) { return callback(err); } return callback()
}
if (err != null) {
return callback(err)
}
// safely return always closing the file // safely return always closing the file
const callbackWithClose = (err, ...result) => const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) { fs.close(fd, function(err1) {
if (err != null) { return callback(err); } if (err != null) {
if (err1 != null) { return callback(err1); } return callback(err)
return callback(null, ...Array.from(result)); }
}) if (err1 != null) {
; return callback(err1)
}
const buff = new Buffer(size, 0); // fill with zeros return callback(null, ...Array.from(result))
return fs.read(fd, buff, 0, buff.length, 0, function(err, bytesRead, buffer) { })
if (err != null) { return callbackWithClose(err); } const buff = new Buffer(size, 0) // fill with zeros
const result = buffer.toString(encoding, 0, bytesRead); return fs.read(fd, buff, 0, buff.length, 0, function(
return callbackWithClose(null, result, bytesRead); err,
}); bytesRead,
}); buffer
) {
if (err != null) {
return callbackWithClose(err)
}
const result = buffer.toString(encoding, 0, bytesRead)
return callbackWithClose(null, result, bytesRead)
})
})
}
} }
});
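
A short usage sketch for `readFile` (the log file path is hypothetical): it reads at most `size` bytes, always closes the descriptor, and reports a missing file as an empty result rather than an error:

// hypothetical usage sketch, not part of this commit
const SafeReader = require('./SafeReader')
SafeReader.readFile('/compiles/project1/output.log', 65536, 'utf8', function(
  error,
  content,
  bytesRead
) {
  if (error != null) {
    return console.error(error)
  }
  // content is undefined when the file does not exist (ENOENT)
  console.log('read', bytesRead, 'bytes')
})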

View File

@@ -14,59 +14,81 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')
module.exports = ForbidSymlinks = function(staticFn, root, options) {
  const expressStatic = staticFn(root, options)
  const basePath = Path.resolve(root)
  return function(req, res, next) {
    let file, project_id, result
    const path = __guard__(url.parse(req.url), x => x.pathname)
    // check that the path is of the form /project_id_or_name/path/to/file.log
    if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
      project_id = result[1]
      file = result[2]
    } else {
      logger.warn({ path }, 'unrecognized file request')
      return res.sendStatus(404)
    }
    // check that the file does not use a relative path
    for (const dir of Array.from(file.split('/'))) {
      if (dir === '..') {
        logger.warn({ path }, 'attempt to use a relative path')
        return res.sendStatus(404)
      }
    }
    // check that the requested path is normalized
    const requestedFsPath = `${basePath}/${project_id}/${file}`
    if (requestedFsPath !== Path.normalize(requestedFsPath)) {
      logger.error(
        { path: requestedFsPath },
        'requestedFsPath is not normalized'
      )
      return res.sendStatus(404)
    }
    // check that the requested path is not a symlink
    return fs.realpath(requestedFsPath, function(err, realFsPath) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return res.sendStatus(404)
        } else {
          logger.error(
            {
              err,
              requestedFsPath,
              realFsPath,
              path: req.params[0],
              project_id: req.params.project_id
            },
            'error checking file access'
          )
          return res.sendStatus(500)
        }
      } else if (requestedFsPath !== realFsPath) {
        logger.warn(
          {
            requestedFsPath,
            realFsPath,
            path: req.params[0],
            project_id: req.params.project_id
          },
          'trying to access a different file (symlink), aborting'
        )
        return res.sendStatus(404)
      } else {
        return expressStatic(req, res, next)
      }
    })
  }
}
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
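
This wrapper is meant to stand in front of `express.static` so compile output can be served without following symlinks or `..` tricks. A plausible wiring, assuming the module file name and output directory shown here:

// hypothetical usage sketch, not part of this commit
const express = require('express')
const ForbidSymlinks = require('./StaticServerForbidSymlinks')
const app = express()
// serve /<project_id>/<file> from the output directory, 404ing symlinks
app.use(ForbidSymlinks(express.static, '/var/clsi/output'))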

View File

@@ -11,52 +11,84 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')
// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.
module.exports = TikzManager = {
  checkMainFile(compileDir, mainFile, resources, callback) {
    // if there's already an output.tex file, we don't want to touch it
    if (callback == null) {
      callback = function(error, needsMainFile) {}
    }
    for (const resource of Array.from(resources)) {
      if (resource.path === 'output.tex') {
        logger.log({ compileDir, mainFile }, 'output.tex already in resources')
        return callback(null, false)
      }
    }
    // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        const usesTikzExternalize =
          (content != null
            ? content.indexOf('\\tikzexternalize')
            : undefined) >= 0
        const usesPsTool =
          (content != null ? content.indexOf('{pstool}') : undefined) >= 0
        logger.log(
          { compileDir, mainFile, usesTikzExternalize, usesPsTool },
          'checked for packages needing main file as output.tex'
        )
        const needsMainFile = usesTikzExternalize || usesPsTool
        return callback(null, needsMainFile)
      })
    })
  },
  injectOutputFile(compileDir, mainFile, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return fs.readFile(path, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { compileDir, mainFile },
          'copied file to output.tex as project uses packages which require it'
        )
        // use wx flag to ensure that output file does not already exist
        return fs.writeFile(
          Path.join(compileDir, 'output.tex'),
          content,
          { flag: 'wx' },
          callback
        )
      })
    })
  }
}
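
The two methods are used in sequence when setting up a compile; roughly, assuming `compileDir`, `resources` and `callback` are already in scope:

// hypothetical usage sketch, not part of this commit
const TikzManager = require('./TikzManager')
TikzManager.checkMainFile(compileDir, 'main.tex', resources, function(
  error,
  needsMainFile
) {
  if (error != null) {
    return callback(error)
  }
  if (!needsMainFile) {
    return callback() // no tikz externalization or pstool in play
  }
  // copy main.tex to output.tex so it matches -jobname=output
  return TikzManager.injectOutputFile(compileDir, 'main.tex', callback)
})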

View File

@@ -12,185 +12,267 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')
module.exports = UrlCache = {
  downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._ensureUrlIsInCache(
      project_id,
      url,
      lastModified,
      (error, pathToCachedUrl) => {
        if (error != null) {
          return callback(error)
        }
        return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
          if (error != null) {
            return UrlCache._clearUrlDetails(project_id, url, () =>
              callback(error)
            )
          } else {
            return callback(error)
          }
        })
      }
    )
  },
  clearProject(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
      logger.log(
        { project_id, url_count: urls.length },
        'clearing project URLs'
      )
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(urls || []).map(url =>
        (url => callback =>
          UrlCache._clearUrlFromCache(project_id, url, function(error) {
            if (error != null) {
              logger.error(
                { err: error, project_id, url },
                'error clearing project URL'
              )
            }
            return callback()
          }))(url)
      )
      return async.series(jobs, callback)
    })
  },
  _ensureUrlIsInCache(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, pathOnDisk) {}
    }
    if (lastModified != null) {
      // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      // So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    }
    return UrlCache._doesUrlNeedDownloading(
      project_id,
      url,
      lastModified,
      (error, needsDownloading) => {
        if (error != null) {
          return callback(error)
        }
        if (needsDownloading) {
          logger.log({ url, lastModified }, 'downloading URL')
          return UrlFetcher.pipeUrlToFile(
            url,
            UrlCache._cacheFilePathForUrl(project_id, url),
            error => {
              if (error != null) {
                return callback(error)
              }
              return UrlCache._updateOrCreateUrlDetails(
                project_id,
                url,
                lastModified,
                error => {
                  if (error != null) {
                    return callback(error)
                  }
                  return callback(
                    null,
                    UrlCache._cacheFilePathForUrl(project_id, url)
                  )
                }
              )
            }
          )
        } else {
          logger.log({ url, lastModified }, 'URL is up to date in cache')
          return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
        }
      }
    )
  },
  _doesUrlNeedDownloading(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, needsDownloading) {}
    }
    if (lastModified == null) {
      return callback(null, true)
    }
    return UrlCache._findUrlDetails(project_id, url, function(
      error,
      urlDetails
    ) {
      if (error != null) {
        return callback(error)
      }
      if (
        urlDetails == null ||
        urlDetails.lastModified == null ||
        urlDetails.lastModified.getTime() < lastModified.getTime()
      ) {
        return callback(null, true)
      } else {
        return callback(null, false)
      }
    })
  },
  _cacheFileNameForUrl(project_id, url) {
    return (
      project_id +
      ':' +
      crypto
        .createHash('md5')
        .update(url)
        .digest('hex')
    )
  },
  _cacheFilePathForUrl(project_id, url) {
    return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
      project_id,
      url
    )}`
  },
  _copyFile(from, to, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (error != null) {
        logger.error({ err: error, from, to }, 'error copying file from cache')
      }
      _callback(error)
      return (_callback = function() {})
    }
    const writeStream = fs.createWriteStream(to)
    const readStream = fs.createReadStream(from)
    writeStream.on('error', callbackOnce)
    readStream.on('error', callbackOnce)
    writeStream.on('close', callbackOnce)
    return writeStream.on('open', () => readStream.pipe(writeStream))
  },
  _clearUrlFromCache(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._clearUrlDetails(project_id, url, function(error) {
      if (error != null) {
        return callback(error)
      }
      return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null)
      })
    })
  },
  _deleteUrlCacheFromDisk(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
      error
    ) {
      if (error != null && error.code !== 'ENOENT') {
        // no error if the file isn't present
        return callback(error)
      } else {
        return callback()
      }
    })
  },
  _findUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error, urlDetails) {}
    }
    const job = cb =>
      db.UrlCache.find({ where: { url, project_id } })
        .then(urlDetails => cb(null, urlDetails))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.findOrCreate({ where: { url, project_id } })
        .spread((urlDetails, created) =>
          urlDetails
            .updateAttributes({ lastModified })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _clearUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.destroy({ where: { url, project_id } })
        .then(() => cb(null))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _findAllUrlsInProject(project_id, callback) {
    if (callback == null) {
      callback = function(error, urls) {}
    }
    const job = cb =>
      db.UrlCache.findAll({ where: { project_id } })
        .then(urlEntries =>
          cb(
            null,
            urlEntries.map(entry => entry.url)
          )
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  }
}
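
Only `downloadUrlToFile` and `clearProject` are public entry points; the underscore-prefixed methods are cache bookkeeping. A sketch of the download path (the project id, URL and destination are hypothetical):

// hypothetical usage sketch, not part of this commit
const UrlCache = require('./UrlCache')
UrlCache.downloadUrlToFile(
  'project1',
  'http://filestore.example.com/project/project1/file/fig1.png',
  '/compiles/project1/fig1.png',
  new Date(), // lastModified is rounded down to whole seconds
  function(error) {
    // on a copy failure the cached details are cleared before the
    // error is passed back, forcing a fresh download next time
    if (error != null) {
      console.error(error)
    }
  }
)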

View File

@@ -12,74 +12,94 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
  pipeUrlToFile(url, filePath, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (timeoutHandler != null) {
        clearTimeout(timeoutHandler)
      }
      _callback(error)
      return (_callback = function() {})
    }
    if (settings.filestoreDomainOveride != null) {
      const p = URL.parse(url).path
      url = `${settings.filestoreDomainOveride}${p}`
    }
    var timeoutHandler = setTimeout(
      function() {
        timeoutHandler = null
        logger.error({ url, filePath }, 'Timed out downloading file to cache')
        return callbackOnce(
          new Error(`Timed out downloading file to cache ${url}`)
        )
      },
      // FIXME: maybe need to close fileStream here
      3 * oneMinute
    )
    logger.log({ url, filePath }, 'started downloading url to cache')
    const urlStream = request.get({ url, timeout: oneMinute })
    urlStream.pause() // stop data flowing until we are ready
    // attach handlers before setting up pipes
    urlStream.on('error', function(error) {
      logger.error({ err: error, url, filePath }, 'error downloading url')
      return callbackOnce(
        error || new Error(`Something went wrong downloading the URL ${url}`)
      )
    })
    urlStream.on('end', () =>
      logger.log({ url, filePath }, 'finished downloading file into cache')
    )
    return urlStream.on('response', function(res) {
      if (res.statusCode >= 200 && res.statusCode < 300) {
        const fileStream = fs.createWriteStream(filePath)
        // attach handlers before setting up pipes
        fileStream.on('error', function(error) {
          logger.error(
            { err: error, url, filePath },
            'error writing file into cache'
          )
          return fs.unlink(filePath, function(err) {
            if (err != null) {
              logger.err({ err, filePath }, 'error deleting file from cache')
            }
            return callbackOnce(error)
          })
        })
        fileStream.on('finish', function() {
          logger.log({ url, filePath }, 'finished writing file into cache')
          return callbackOnce()
        })
        fileStream.on('pipe', () =>
          logger.log({ url, filePath }, 'piping into filestream')
        )
        urlStream.pipe(fileStream)
        return urlStream.resume() // now we are ready to handle the data
      } else {
        logger.error(
          { statusCode: res.statusCode, url, filePath },
          'unexpected status code downloading url to cache'
        )
        // https://nodejs.org/api/http.html#http_class_http_clientrequest
        // If you add a 'response' event handler, then you must consume
        // the data from the response object, either by calling
@@ -88,9 +108,13 @@ module.exports = (UrlFetcher = {
        // method. Until the data is consumed, the 'end' event will not
        // fire. Also, until the data is read it will consume memory
        // that can eventually lead to a 'process out of memory' error.
        urlStream.resume() // discard the data
        return callbackOnce(
          new Error(
            `URL returned non-success status code: ${res.statusCode} ${url}`
          )
        )
      }
    })
  }
}
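
`pipeUrlToFile` guarantees a single callback invocation via `callbackOnce`; a sketch (URL and cache path hypothetical):

// hypothetical usage sketch, not part of this commit
const UrlFetcher = require('./UrlFetcher')
UrlFetcher.pipeUrlToFile(
  'http://example.com/refs.bib',
  '/cache/project1:d41d8cd9',
  function(error) {
    // called once, whether the download succeeded, errored, returned
    // a non-2xx status, or hit the three-minute timeout
    if (error != null) {
      console.error(error)
    }
  }
)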

View File

@@ -8,57 +8,60 @@
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('underscore')
const logger = require('logger-sharelatex')
const options = _.extend({ logging: false }, Settings.mysql.clsi)
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')
const sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  options
)
if (Settings.mysql.clsi.dialect === 'sqlite') {
  logger.log('running PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA synchronous=OFF;')
  sequelize.query('PRAGMA read_uncommitted = true;')
}
module.exports = {
  UrlCache: sequelize.define(
    'UrlCache',
    {
      url: Sequelize.STRING,
      project_id: Sequelize.STRING,
      lastModified: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
    }
  ),
  Project: sequelize.define(
    'Project',
    {
      project_id: { type: Sequelize.STRING, primaryKey: true },
      lastAccessed: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['lastAccessed'] }]
    }
  ),
  op: Sequelize.Op,
  sync() {
    logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
    return sequelize
      .sync()
      .then(() => logger.log('db sync complete'))
      .catch(err => console.log(err, 'error syncing'))
  }
}
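
The module is consumed as plain Sequelize models plus the `sync` helper; for instance (caller code illustrative, project id hypothetical):

// hypothetical usage sketch, not part of this commit
const db = require('./db')
db.sync() // create the UrlCache and Project tables on boot
db.UrlCache.findAll({ where: { project_id: 'project1' } }).then(entries =>
  console.log(entries.map(entry => entry.url))
)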