prettier: convert app/js decaffeinated files to Prettier format

Author: mserranom
Date: 2020-02-19 12:14:37 +01:00
parent 4576ef54fb
commit cffbd4e9ef
26 changed files with 3881 additions and 2639 deletions
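
The conversion below is mechanical: double quotes become single quotes, trailing semicolons are dropped, and long statements are rewrapped at Prettier's default 80-column print width. Those choices correspond to a Prettier configuration along the following lines (a hypothetical .prettierrc inferred from the output style, not taken from the repository):

{
  "semi": false,
  "singleQuote": true
}

A conversion like this is typically applied in a single pass, for example with npx prettier --write "app/js/**/*.js" (an assumed invocation; the actual command used is not shown in this commit).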


@@ -5,16 +5,16 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let commandRunnerPath
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
if ((Settings.clsi != null ? Settings.clsi.dockerRunner : undefined) === true) {
  commandRunnerPath = './DockerRunner'
} else {
  commandRunnerPath = './LocalCommandRunner'
}
logger.info({ commandRunnerPath }, 'selecting command runner for clsi')
const CommandRunner = require(commandRunnerPath)
module.exports = CommandRunner


@@ -12,159 +12,227 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CompileController
const RequestParser = require('./RequestParser')
const CompileManager = require('./CompileManager')
const Settings = require('settings-sharelatex')
const Metrics = require('./Metrics')
const ProjectPersistenceManager = require('./ProjectPersistenceManager')
const logger = require('logger-sharelatex')
const Errors = require('./Errors')

module.exports = CompileController = {
  compile(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const timer = new Metrics.Timer('compile-request')
    return RequestParser.parse(req.body, function(error, request) {
      if (error != null) {
        return next(error)
      }
      request.project_id = req.params.project_id
      if (req.params.user_id != null) {
        request.user_id = req.params.user_id
      }
      return ProjectPersistenceManager.markProjectAsJustAccessed(
        request.project_id,
        function(error) {
          if (error != null) {
            return next(error)
          }
          return CompileManager.doCompileWithLock(request, function(
            error,
            outputFiles
          ) {
            let code, status
            if (outputFiles == null) {
              outputFiles = []
            }
            if (error instanceof Errors.AlreadyCompilingError) {
              code = 423 // Http 423 Locked
              status = 'compile-in-progress'
            } else if (error instanceof Errors.FilesOutOfSyncError) {
              code = 409 // Http 409 Conflict
              status = 'retry'
            } else if (error != null ? error.terminated : undefined) {
              status = 'terminated'
            } else if (error != null ? error.validate : undefined) {
              status = `validation-${error.validate}`
            } else if (error != null ? error.timedout : undefined) {
              status = 'timedout'
              logger.log(
                { err: error, project_id: request.project_id },
                'timeout running compile'
              )
            } else if (error != null) {
              status = 'error'
              code = 500
              logger.warn(
                { err: error, project_id: request.project_id },
                'error running compile'
              )
            } else {
              let file
              status = 'failure'
              for (file of Array.from(outputFiles)) {
                if (
                  file.path != null
                    ? file.path.match(/output\.pdf$/)
                    : undefined
                ) {
                  status = 'success'
                }
              }
              if (status === 'failure') {
                logger.warn(
                  { project_id: request.project_id, outputFiles },
                  'project failed to compile successfully, no output.pdf generated'
                )
              }
              // log an error if any core files are found
              for (file of Array.from(outputFiles)) {
                if (file.path === 'core') {
                  logger.error(
                    { project_id: request.project_id, req, outputFiles },
                    'core file found in output'
                  )
                }
              }
            }
            if (error != null) {
              outputFiles = error.outputFiles || []
            }
            timer.done()
            return res.status(code || 200).send({
              compile: {
                status,
                error: (error != null ? error.message : undefined) || error,
                outputFiles: outputFiles.map(file => ({
                  url:
                    `${Settings.apis.clsi.url}/project/${request.project_id}` +
                    (request.user_id != null
                      ? `/user/${request.user_id}`
                      : '') +
                    (file.build != null ? `/build/${file.build}` : '') +
                    `/output/${file.path}`,
                  path: file.path,
                  type: file.type,
                  build: file.build
                }))
              }
            })
          })
        }
      )
    })
  },

  stopCompile(req, res, next) {
    const { project_id, user_id } = req.params
    return CompileManager.stopCompile(project_id, user_id, function(error) {
      if (error != null) {
        return next(error)
      }
      return res.sendStatus(204)
    })
  },

  clearCache(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return ProjectPersistenceManager.clearProject(
      req.params.project_id,
      req.params.user_id,
      function(error) {
        if (error != null) {
          return next(error)
        }
        return res.sendStatus(204)
      }
    )
  }, // No content

  syncFromCode(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const { file } = req.query
    const line = parseInt(req.query.line, 10)
    const column = parseInt(req.query.column, 10)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromCode(
      project_id,
      user_id,
      file,
      line,
      column,
      function(error, pdfPositions) {
        if (error != null) {
          return next(error)
        }
        return res.json({
          pdf: pdfPositions
        })
      }
    )
  },

  syncFromPdf(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const page = parseInt(req.query.page, 10)
    const h = parseFloat(req.query.h)
    const v = parseFloat(req.query.v)
    const { project_id } = req.params
    const { user_id } = req.params
    return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
      error,
      codePositions
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        code: codePositions
      })
    })
  },

  wordcount(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    const file = req.query.file || 'main.tex'
    const { project_id } = req.params
    const { user_id } = req.params
    const { image } = req.query
    logger.log({ image, file, project_id }, 'word count request')
    return CompileManager.wordcount(project_id, user_id, file, image, function(
      error,
      result
    ) {
      if (error != null) {
        return next(error)
      }
      return res.json({
        texcount: result
      })
    })
  },

  status(req, res, next) {
    if (next == null) {
      next = function(error) {}
    }
    return res.send('OK')
  }
}

File diff suppressed because it is too large


@@ -3,31 +3,36 @@
 */
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let ContentTypeMapper
const Path = require('path')

// here we coerce html, css and js to text/plain,
// otherwise choose correct mime type based on file extension,
// falling back to octet-stream
module.exports = ContentTypeMapper = {
  map(path) {
    switch (Path.extname(path)) {
      case '.txt':
      case '.html':
      case '.js':
      case '.css':
      case '.svg':
        return 'text/plain'
      case '.csv':
        return 'text/csv'
      case '.pdf':
        return 'application/pdf'
      case '.png':
        return 'image/png'
      case '.jpg':
      case '.jpeg':
        return 'image/jpeg'
      case '.tiff':
        return 'image/tiff'
      case '.gif':
        return 'image/gif'
      default:
        return 'application/octet-stream'
    }
  }
}


@@ -5,14 +5,14 @@
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const async = require('async')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')

const queue = async.queue(
  (task, cb) => task(cb),
  Settings.parallelSqlQueryLimit
)

queue.drain = () => logger.debug('all items have been processed')

module.exports = { queue }


@@ -10,80 +10,104 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const logger = require('logger-sharelatex')

const LockState = {} // locks for docker container operations, by container name

module.exports = LockManager = {
  MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock
  MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock
  LOCK_TEST_INTERVAL: 1000, // retry time

  tryLock(key, callback) {
    let lockValue
    if (callback == null) {
      callback = function(err, gotLock) {}
    }
    const existingLock = LockState[key]
    if (existingLock != null) {
      // the lock is already taken, check how old it is
      const lockAge = Date.now() - existingLock.created
      if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) {
        return callback(null, false) // we didn't get the lock, bail out
      } else {
        logger.error(
          { key, lock: existingLock, age: lockAge },
          'taking old lock by force'
        )
      }
    }
    // take the lock
    LockState[key] = lockValue = { created: Date.now() }
    return callback(null, true, lockValue)
  },

  getLock(key, callback) {
    let attempt
    if (callback == null) {
      callback = function(error, lockValue) {}
    }
    const startTime = Date.now()
    return (attempt = () =>
      LockManager.tryLock(key, function(error, gotLock, lockValue) {
        if (error != null) {
          return callback(error)
        }
        if (gotLock) {
          return callback(null, lockValue)
        } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) {
          const e = new Error('Lock timeout')
          e.key = key
          return callback(e)
        } else {
          return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL)
        }
      }))()
  },

  releaseLock(key, lockValue, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const existingLock = LockState[key]
    if (existingLock === lockValue) {
      // lockValue is an object, so we can test by reference
      delete LockState[key] // our lock, so we can free it
      return callback()
    } else if (existingLock != null) {
      // lock exists but doesn't match ours
      logger.error(
        { key, lock: existingLock },
        'tried to release lock taken by force'
      )
      return callback()
    } else {
      logger.error(
        { key, lock: existingLock },
        'tried to release lock that has gone'
      )
      return callback()
    }
  },

  runWithLock(key, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return LockManager.getLock(key, function(error, lockValue) {
      if (error != null) {
        return callback(error)
      }
      return runner((error1, ...args) =>
        LockManager.releaseLock(key, lockValue, function(error2) {
          error = error1 || error2
          if (error != null) {
            return callback(error)
          }
          return callback(null, ...Array.from(args))
        })
      )
    })
  }
}

File diff suppressed because it is too large


@@ -11,34 +11,47 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let DraftModeManager
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = DraftModeManager = {
  injectDraftMode(filename, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.readFile(filename, 'utf8', function(error, content) {
      if (error != null) {
        return callback(error)
      }
      // avoid adding draft mode more than once
      if (
        (content != null
          ? content.indexOf('\\documentclass[draft')
          : undefined) >= 0
      ) {
        return callback()
      }
      const modified_content = DraftModeManager._injectDraftOption(content)
      logger.log(
        {
          content: content.slice(0, 1024), // \documentclass is normally v near the top
          modified_content: modified_content.slice(0, 1024),
          filename
        },
        'injected draft class'
      )
      return fs.writeFile(filename, modified_content, callback)
    })
  },

  _injectDraftOption(content) {
    return (
      content
        // With existing options (must be first, otherwise both are applied)
        .replace(/\\documentclass\[/g, '\\documentclass[draft,')
        // Without existing options
        .replace(/\\documentclass\{/g, '\\documentclass[draft]{')
    )
  }
}


@@ -4,33 +4,33 @@
 */
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors

var NotFoundError = function(message) {
  const error = new Error(message)
  error.name = 'NotFoundError'
  error.__proto__ = NotFoundError.prototype
  return error
}
NotFoundError.prototype.__proto__ = Error.prototype

var FilesOutOfSyncError = function(message) {
  const error = new Error(message)
  error.name = 'FilesOutOfSyncError'
  error.__proto__ = FilesOutOfSyncError.prototype
  return error
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype

var AlreadyCompilingError = function(message) {
  const error = new Error(message)
  error.name = 'AlreadyCompilingError'
  error.__proto__ = AlreadyCompilingError.prototype
  return error
}
AlreadyCompilingError.prototype.__proto__ = Error.prototype

module.exports = Errors = {
  NotFoundError,
  FilesOutOfSyncError,
  AlreadyCompilingError
}


@@ -13,119 +13,192 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LatexRunner
const Path = require('path')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const CommandRunner = require('./CommandRunner')

const ProcessTable = {} // table of currently running jobs (pids or docker container names)

module.exports = LatexRunner = {
  runLatex(project_id, options, callback) {
    let command
    if (callback == null) {
      callback = function(error) {}
    }
    let {
      directory,
      mainFile,
      compiler,
      timeout,
      image,
      environment,
      flags
    } = options
    if (!compiler) {
      compiler = 'pdflatex'
    }
    if (!timeout) {
      timeout = 60000
    } // milliseconds

    logger.log(
      { directory, compiler, timeout, mainFile, environment, flags },
      'starting compile'
    )

    // We want to run latexmk on the tex file which we will automatically
    // generate from the Rtex/Rmd/md file.
    mainFile = mainFile.replace(/\.(Rtex|md|Rmd)$/, '.tex')

    if (compiler === 'pdflatex') {
      command = LatexRunner._pdflatexCommand(mainFile, flags)
    } else if (compiler === 'latex') {
      command = LatexRunner._latexCommand(mainFile, flags)
    } else if (compiler === 'xelatex') {
      command = LatexRunner._xelatexCommand(mainFile, flags)
    } else if (compiler === 'lualatex') {
      command = LatexRunner._lualatexCommand(mainFile, flags)
    } else {
      return callback(new Error(`unknown compiler: ${compiler}`))
    }

    if (Settings.clsi != null ? Settings.clsi.strace : undefined) {
      command = ['strace', '-o', 'strace', '-ff'].concat(command)
    }

    const id = `${project_id}` // record running project under this id

    return (ProcessTable[id] = CommandRunner.run(
      project_id,
      command,
      directory,
      image,
      timeout,
      environment,
      function(error, output) {
        delete ProcessTable[id]
        if (error != null) {
          return callback(error)
        }
        const runs =
          __guard__(
            __guard__(output != null ? output.stderr : undefined, x1 =>
              x1.match(/^Run number \d+ of .*latex/gm)
            ),
            x => x.length
          ) || 0
        const failed =
          __guard__(output != null ? output.stdout : undefined, x2 =>
            x2.match(/^Latexmk: Errors/m)
          ) != null
            ? 1
            : 0
        // counters from latexmk output
        const stats = {}
        stats['latexmk-errors'] = failed
        stats['latex-runs'] = runs
        stats['latex-runs-with-errors'] = failed ? runs : 0
        stats[`latex-runs-${runs}`] = 1
        stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0
        // timing information from /usr/bin/time
        const timings = {}
        const stderr = output != null ? output.stderr : undefined
        timings['cpu-percent'] =
          __guard__(
            stderr != null
              ? stderr.match(/Percent of CPU this job got: (\d+)/m)
              : undefined,
            x3 => x3[1]
          ) || 0
        timings['cpu-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/User time.*: (\d+.\d+)/m)
              : undefined,
            x4 => x4[1]
          ) || 0
        timings['sys-time'] =
          __guard__(
            stderr != null
              ? stderr.match(/System time.*: (\d+.\d+)/m)
              : undefined,
            x5 => x5[1]
          ) || 0
        return callback(error, output, stats, timings)
      }
    ))
  },

  killLatex(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const id = `${project_id}`
    logger.log({ id }, 'killing running compile')
    if (ProcessTable[id] == null) {
      logger.warn({ id }, 'no such project to kill')
      return callback(null)
    } else {
      return CommandRunner.kill(ProcessTable[id], callback)
    }
  },

  _latexmkBaseCommand(flags) {
    let args = [
      'latexmk',
      '-cd',
      '-f',
      '-jobname=output',
      '-auxdir=$COMPILE_DIR',
      '-outdir=$COMPILE_DIR',
      '-synctex=1',
      '-interaction=batchmode'
    ]
    if (flags) {
      args = args.concat(flags)
    }
    return (
      __guard__(
        Settings != null ? Settings.clsi : undefined,
        x => x.latexmkCommandPrefix
      ) || []
    ).concat(args)
  },

  _pdflatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdf',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _latexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-pdfdvi',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _xelatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-xelatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  },

  _lualatexCommand(mainFile, flags) {
    return LatexRunner._latexmkBaseCommand(flags).concat([
      '-lualatex',
      Path.join('$COMPILE_DIR', mainFile)
    ])
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}


@@ -13,62 +13,79 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let CommandRunner
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

logger.info('using standard command runner')

module.exports = CommandRunner = {
  run(project_id, command, directory, image, timeout, environment, callback) {
    let key, value
    if (callback == null) {
      callback = function(error) {}
    }
    command = Array.from(command).map(arg =>
      arg.toString().replace('$COMPILE_DIR', directory)
    )
    logger.log({ project_id, command, directory }, 'running command')
    logger.warn('timeouts and sandboxing are not enabled with CommandRunner')

    // merge environment settings
    const env = {}
    for (key in process.env) {
      value = process.env[key]
      env[key] = value
    }
    for (key in environment) {
      value = environment[key]
      env[key] = value
    }

    // run command as detached process so it has its own process group (which can be killed if needed)
    const proc = spawn(command[0], command.slice(1), { cwd: directory, env })

    let stdout = ''
    proc.stdout.on('data', data => (stdout += data))

    proc.on('error', function(err) {
      logger.err(
        { err, project_id, command, directory },
        'error running command'
      )
      return callback(err)
    })

    proc.on('close', function(code, signal) {
      let err
      logger.info({ code, signal, project_id }, 'command exited')
      if (signal === 'SIGTERM') {
        // signal from kill method below
        err = new Error('terminated')
        err.terminated = true
        return callback(err)
      } else if (code === 1) {
        // exit status from chktex
        err = new Error('exited')
        err.code = code
        return callback(err)
      } else {
        return callback(null, { stdout: stdout })
      }
    })

    return proc.pid
  }, // return process id to allow job to be killed if necessary

  kill(pid, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    try {
      process.kill(-pid) // kill all processes in group
    } catch (err) {
      return callback(err)
    }
    return callback()
  }
}


@@ -11,46 +11,62 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let LockManager
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const Lockfile = require('lockfile') // from https://github.com/npm/lockfile
const Errors = require('./Errors')
const fs = require('fs')
const Path = require('path')

module.exports = LockManager = {
  LOCK_TEST_INTERVAL: 1000, // 50ms between each test of the lock
  MAX_LOCK_WAIT_TIME: 15000, // 10s maximum time to spend trying to get the lock
  LOCK_STALE: 5 * 60 * 1000, // 5 mins time until lock auto expires

  runWithLock(path, runner, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const lockOpts = {
      wait: this.MAX_LOCK_WAIT_TIME,
      pollPeriod: this.LOCK_TEST_INTERVAL,
      stale: this.LOCK_STALE
    }
    return Lockfile.lock(path, lockOpts, function(error) {
      if ((error != null ? error.code : undefined) === 'EEXIST') {
        return callback(new Errors.AlreadyCompilingError('compile in progress'))
      } else if (error != null) {
        return fs.lstat(path, (statLockErr, statLock) =>
          fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
            fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
              logger.err(
                {
                  error,
                  path,
                  statLock,
                  statLockErr,
                  statDir,
                  statDirErr,
                  readdirErr,
                  readdirDir
                },
                'unable to get lock'
              )
              return callback(error)
            })
          )
        )
      } else {
        return runner((error1, ...args) =>
          Lockfile.unlock(path, function(error2) {
            error = error1 || error2
            if (error != null) {
              return callback(error)
            }
            return callback(null, ...Array.from(args))
          })
        )
      }
    })
  }
}


@@ -1,4 +1,3 @@
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
module.exports = require('metrics-sharelatex')


@@ -13,263 +13,387 @@
* DS207: Consider shorter variations of null checks * DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/ */
let OutputCacheManager; let OutputCacheManager
const async = require("async"); const async = require('async')
const fs = require("fs"); const fs = require('fs')
const fse = require("fs-extra"); const fse = require('fs-extra')
const Path = require("path"); const Path = require('path')
const logger = require("logger-sharelatex"); const logger = require('logger-sharelatex')
const _ = require("underscore"); const _ = require('underscore')
const Settings = require("settings-sharelatex"); const Settings = require('settings-sharelatex')
const crypto = require("crypto"); const crypto = require('crypto')
const OutputFileOptimiser = require("./OutputFileOptimiser"); const OutputFileOptimiser = require('./OutputFileOptimiser')
module.exports = (OutputCacheManager = { module.exports = OutputCacheManager = {
CACHE_SUBDIR: '.cache/clsi', CACHE_SUBDIR: '.cache/clsi',
ARCHIVE_SUBDIR: '.archive/clsi', ARCHIVE_SUBDIR: '.archive/clsi',
// build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes // build id is HEXDATE-HEXRANDOM from Date.now()and RandomBytes
// for backwards compatibility, make the randombytes part optional // for backwards compatibility, make the randombytes part optional
BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/, BUILD_REGEX: /^[0-9a-f]+(-[0-9a-f]+)?$/,
CACHE_LIMIT: 2, // maximum number of cache directories CACHE_LIMIT: 2, // maximum number of cache directories
CACHE_AGE: 60*60*1000, // up to one hour old CACHE_AGE: 60 * 60 * 1000, // up to one hour old
path(buildId, file) { path(buildId, file) {
// used by static server, given build id return '.cache/clsi/buildId' // used by static server, given build id return '.cache/clsi/buildId'
if (buildId.match(OutputCacheManager.BUILD_REGEX)) { if (buildId.match(OutputCacheManager.BUILD_REGEX)) {
return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file); return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file)
} else { } else {
// for invalid build id, return top level // for invalid build id, return top level
return file; return file
} }
}, },
generateBuildId(callback) { generateBuildId(callback) {
// generate a secure build id from Date.now() and 8 random bytes in hex // generate a secure build id from Date.now() and 8 random bytes in hex
if (callback == null) { callback = function(error, buildId) {}; } if (callback == null) {
return crypto.randomBytes(8, function(err, buf) { callback = function(error, buildId) {}
if (err != null) { return callback(err); } }
const random = buf.toString('hex'); return crypto.randomBytes(8, function(err, buf) {
const date = Date.now().toString(16); if (err != null) {
return callback(err, `${date}-${random}`); return callback(err)
}); }
}, const random = buf.toString('hex')
const date = Date.now().toString(16)
return callback(err, `${date}-${random}`)
})
},
saveOutputFiles(outputFiles, compileDir, callback) { saveOutputFiles(outputFiles, compileDir, callback) {
if (callback == null) { callback = function(error) {}; } if (callback == null) {
return OutputCacheManager.generateBuildId(function(err, buildId) { callback = function(error) {}
if (err != null) { return callback(err); } }
return OutputCacheManager.saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback); return OutputCacheManager.generateBuildId(function(err, buildId) {
}); if (err != null) {
}, return callback(err)
}
return OutputCacheManager.saveOutputFilesInBuildDir(
outputFiles,
compileDir,
buildId,
callback
)
})
},
saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) { saveOutputFilesInBuildDir(outputFiles, compileDir, buildId, callback) {
// make a compileDir/CACHE_SUBDIR/build_id directory and // make a compileDir/CACHE_SUBDIR/build_id directory and
// copy all the output files into it // copy all the output files into it
if (callback == null) { callback = function(error) {}; } if (callback == null) {
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR); callback = function(error) {}
// Put the files into a new cache subdirectory }
const cacheDir = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR, buildId); const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
// Is it a per-user compile? check if compile directory is PROJECTID-USERID // Put the files into a new cache subdirectory
const perUser = Path.basename(compileDir).match(/^[0-9a-f]{24}-[0-9a-f]{24}$/); const cacheDir = Path.join(
compileDir,
OutputCacheManager.CACHE_SUBDIR,
buildId
)
// Is it a per-user compile? check if compile directory is PROJECTID-USERID
const perUser = Path.basename(compileDir).match(
/^[0-9a-f]{24}-[0-9a-f]{24}$/
)
// Archive logs in background // Archive logs in background
if ((Settings.clsi != null ? Settings.clsi.archive_logs : undefined) || (Settings.clsi != null ? Settings.clsi.strace : undefined)) { if (
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(err) { (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
if (err != null) { (Settings.clsi != null ? Settings.clsi.strace : undefined)
return logger.warn({err}, "erroring archiving log files"); ) {
} OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
}); err
} ) {
if (err != null) {
return logger.warn({ err }, 'erroring archiving log files')
}
})
}
// make the new cache directory // make the new cache directory
return fse.ensureDir(cacheDir, function(err) { return fse.ensureDir(cacheDir, function(err) {
if (err != null) { if (err != null) {
logger.error({err, directory: cacheDir}, "error creating cache directory"); logger.error(
return callback(err, outputFiles); { err, directory: cacheDir },
} else { 'error creating cache directory'
// copy all the output files into the new cache directory )
const results = []; return callback(err, outputFiles)
return async.mapSeries(outputFiles, function(file, cb) { } else {
// don't send dot files as output, express doesn't serve them // copy all the output files into the new cache directory
if (OutputCacheManager._fileIsHidden(file.path)) { const results = []
logger.debug({compileDir, path: file.path}, "ignoring dotfile in output"); return async.mapSeries(
return cb(); outputFiles,
} function(file, cb) {
// copy other files into cache directory if valid // don't send dot files as output, express doesn't serve them
const newFile = _.clone(file); if (OutputCacheManager._fileIsHidden(file.path)) {
const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(cacheDir, file.path)]); logger.debug(
return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { { compileDir, path: file.path },
if (err != null) { return cb(err); } 'ignoring dotfile in output'
if (!isSafe) { )
return cb(); return cb()
} }
return OutputCacheManager._checkIfShouldCopy(src, function(err, shouldCopy) { // copy other files into cache directory if valid
if (err != null) { return cb(err); } const newFile = _.clone(file)
if (!shouldCopy) { const [src, dst] = Array.from([
return cb(); Path.join(compileDir, file.path),
} Path.join(cacheDir, file.path)
return OutputCacheManager._copyFile(src, dst, function(err) { ])
if (err != null) { return cb(err); } return OutputCacheManager._checkFileIsSafe(src, function(
newFile.build = buildId; // attach a build id if we cached the file err,
results.push(newFile); isSafe
return cb(); ) {
}); if (err != null) {
}); return cb(err)
}); }
} if (!isSafe) {
, function(err) { return cb()
if (err != null) { }
// pass back the original files if we encountered *any* error return OutputCacheManager._checkIfShouldCopy(src, function(
callback(err, outputFiles); err,
// clean up the directory we just created shouldCopy
return fse.remove(cacheDir, function(err) { ) {
if (err != null) { if (err != null) {
return logger.error({err, dir: cacheDir}, "error removing cache dir after failure"); return cb(err)
} }
}); if (!shouldCopy) {
} else { return cb()
// pass back the list of new files in the cache }
callback(err, results); return OutputCacheManager._copyFile(src, dst, function(err) {
// let file expiry run in the background, expire all previous files if per-user if (err != null) {
return OutputCacheManager.expireOutputFiles(cacheRoot, {keep: buildId, limit: perUser ? 1 : null}); return cb(err)
} }
}); newFile.build = buildId // attach a build id if we cached the file
} results.push(newFile)
}); return cb()
}, })
})
})
},
function(err) {
if (err != null) {
// pass back the original files if we encountered *any* error
callback(err, outputFiles)
// clean up the directory we just created
return fse.remove(cacheDir, function(err) {
if (err != null) {
return logger.error(
{ err, dir: cacheDir },
'error removing cache dir after failure'
)
}
})
} else {
// pass back the list of new files in the cache
callback(err, results)
// let file expiry run in the background, expire all previous files if per-user
return OutputCacheManager.expireOutputFiles(cacheRoot, {
keep: buildId,
limit: perUser ? 1 : null
})
}
}
)
}
})
},
archiveLogs(outputFiles, compileDir, buildId, callback) { archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) { callback = function(error) {}; } if (callback == null) {
const archiveDir = Path.join(compileDir, OutputCacheManager.ARCHIVE_SUBDIR, buildId); callback = function(error) {}
logger.log({dir: archiveDir}, "archiving log files for project"); }
return fse.ensureDir(archiveDir, function(err) { const archiveDir = Path.join(
if (err != null) { return callback(err); } compileDir,
return async.mapSeries(outputFiles, function(file, cb) { OutputCacheManager.ARCHIVE_SUBDIR,
const [src, dst] = Array.from([Path.join(compileDir, file.path), Path.join(archiveDir, file.path)]); buildId
return OutputCacheManager._checkFileIsSafe(src, function(err, isSafe) { )
if (err != null) { return cb(err); } logger.log({ dir: archiveDir }, 'archiving log files for project')
if (!isSafe) { return cb(); } return fse.ensureDir(archiveDir, function(err) {
return OutputCacheManager._checkIfShouldArchive(src, function(err, shouldArchive) { if (err != null) {
if (err != null) { return cb(err); } return callback(err)
if (!shouldArchive) { return cb(); } }
return OutputCacheManager._copyFile(src, dst, cb); return async.mapSeries(
}); outputFiles,
}); function(file, cb) {
} const [src, dst] = Array.from([
, callback); Path.join(compileDir, file.path),
}); Path.join(archiveDir, file.path)
}, ])
return OutputCacheManager._checkFileIsSafe(src, function(
err,
isSafe
) {
if (err != null) {
return cb(err)
}
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldArchive(src, function(
err,
shouldArchive
) {
if (err != null) {
return cb(err)
}
if (!shouldArchive) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, cb)
})
})
},
callback
)
})
},
expireOutputFiles(cacheRoot, options, callback) { expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T // look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) { callback = function(error) {}; } if (callback == null) {
return fs.readdir(cacheRoot, function(err, results) { callback = function(error) {}
if (err != null) { }
if (err.code === 'ENOENT') { return callback(null); } // cache directory is empty return fs.readdir(cacheRoot, function(err, results) {
logger.error({err, project_id: cacheRoot}, "error clearing cache"); if (err != null) {
return callback(err); if (err.code === 'ENOENT') {
} return callback(null)
} // cache directory is empty
logger.error({ err, project_id: cacheRoot }, 'error clearing cache')
return callback(err)
}
const dirs = results.sort().reverse(); const dirs = results.sort().reverse()
const currentTime = Date.now(); const currentTime = Date.now()
const isExpired = function(dir, index) { const isExpired = function(dir, index) {
if ((options != null ? options.keep : undefined) === dir) { return false; } if ((options != null ? options.keep : undefined) === dir) {
// remove any directories over the requested (non-null) limit return false
if (((options != null ? options.limit : undefined) != null) && (index > options.limit)) { return true; } }
// remove any directories over the hard limit // remove any directories over the requested (non-null) limit
if (index > OutputCacheManager.CACHE_LIMIT) { return true; } if (
// we can get the build time from the first part of the directory name DDDD-RRRR (options != null ? options.limit : undefined) != null &&
// DDDD is date and RRRR is random bytes index > options.limit
const dirTime = parseInt(__guard__(dir.split('-'), x => x[0]), 16); ) {
const age = currentTime - dirTime; return true
return age > OutputCacheManager.CACHE_AGE; }
}; // remove any directories over the hard limit
if (index > OutputCacheManager.CACHE_LIMIT) {
return true
}
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), x => x[0]),
16
)
const age = currentTime - dirTime
return age > OutputCacheManager.CACHE_AGE
}
const toRemove = _.filter(dirs, isExpired); const toRemove = _.filter(dirs, isExpired)
const removeDir = (dir, cb) => const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) { fse.remove(Path.join(cacheRoot, dir), function(err, result) {
          logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
          if (err != null) {
            logger.error({ err, dir }, 'cache remove error')
          }
          return cb(err, result)
        })
      return async.eachSeries(
        toRemove,
        (dir, cb) => removeDir(dir, cb),
        callback
      )
    })
  },

  _fileIsHidden(path) {
    return (path != null ? path.match(/^\.|\/\./) : undefined) != null
  },

  _checkFileIsSafe(src, callback) {
    // check if we have a valid file to copy into the cache
    if (callback == null) {
      callback = function(error, isSafe) {}
    }
    return fs.stat(src, function(err, stats) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared before copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        // some other problem reading the file
        logger.error({ err, file: src }, 'stat error for file in cache')
        return callback(err, false)
      } else if (!stats.isFile()) {
        // other filetype - reject it
        logger.warn(
          { src, stat: stats },
          'nonfile output - refusing to copy to cache'
        )
        return callback(null, false)
      } else {
        // it's a plain file, ok to copy
        return callback(null, true)
      }
    })
  },

  _copyFile(src, dst, callback) {
    // copy output file into the cache
    return fse.copy(src, dst, function(err) {
      if ((err != null ? err.code : undefined) === 'ENOENT') {
        logger.warn(
          { err, file: src },
          'file has disappeared when copying to build cache'
        )
        return callback(err, false)
      } else if (err != null) {
        logger.error({ err, src, dst }, 'copy error for file in cache')
        return callback(err)
      } else {
        if (
          Settings.clsi != null ? Settings.clsi.optimiseInDocker : undefined
        ) {
          // don't run any optimisations on the pdf when they are done
          // in the docker container
          return callback()
        } else {
          // call the optimiser for the file too
          return OutputFileOptimiser.optimiseFile(src, dst, callback)
        }
      }
    })
  },

  _checkIfShouldCopy(src, callback) {
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    return callback(null, !Path.basename(src).match(/^strace/))
  },

  _checkIfShouldArchive(src, callback) {
    let needle
    if (callback == null) {
      callback = function(err, shouldCopy) {}
    }
    if (Path.basename(src).match(/^strace/)) {
      return callback(null, true)
    }
    if (
      (Settings.clsi != null ? Settings.clsi.archive_logs : undefined) &&
      ((needle = Path.basename(src)),
      ['output.log', 'output.blg'].includes(needle))
    ) {
      return callback(null, true)
    }
    return callback(null, false)
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
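
A minimal sketch of the sequential-cleanup pattern used above: async.eachSeries runs one removal job at a time, so expiring a large cache cannot saturate the disk with parallel deletes. The directory names here are hypothetical.

const async = require('async')
const fse = require('fs-extra')

// hypothetical list of expired build directories
const expiredDirs = ['/cache/output-001', '/cache/output-002']

async.eachSeries(
  expiredDirs,
  (dir, cb) => fse.remove(dir, cb), // one removal at a time
  err => {
    if (err) console.error('cleanup failed', err)
  }
)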

View File

@@ -14,73 +14,102 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileFinder
const async = require('async')
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')

module.exports = OutputFileFinder = {
  findOutputFiles(resources, directory, callback) {
    if (callback == null) {
      callback = function(error, outputFiles, allFiles) {}
    }
    const incomingResources = {}
    for (const resource of Array.from(resources)) {
      incomingResources[resource.path] = true
    }

    return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
      if (allFiles == null) {
        allFiles = []
      }
      if (error != null) {
        logger.err({ err: error }, 'error finding all output files')
        return callback(error)
      }
      const outputFiles = []
      for (const file of Array.from(allFiles)) {
        if (!incomingResources[file]) {
          outputFiles.push({
            path: file,
            type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
          })
        }
      }
      return callback(null, outputFiles, allFiles)
    })
  },

  _getAllFiles(directory, _callback) {
    if (_callback == null) {
      _callback = function(error, fileList) {}
    }
    const callback = function(error, fileList) {
      _callback(error, fileList)
      return (_callback = function() {})
    }

    // don't include clsi-specific files/directories in the output list
    const EXCLUDE_DIRS = [
      '-name',
      '.cache',
      '-o',
      '-name',
      '.archive',
      '-o',
      '-name',
      '.project-*'
    ]
    const args = [
      directory,
      '(',
      ...Array.from(EXCLUDE_DIRS),
      ')',
      '-prune',
      '-o',
      '-type',
      'f',
      '-print'
    ]
    logger.log({ args }, 'running find command')

    const proc = spawn('find', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    proc.on('error', callback)
    return proc.on('close', function(code) {
      if (code !== 0) {
        logger.warn(
          { directory, code },
          "find returned error, directory likely doesn't exist"
        )
        return callback(null, [])
      }
      let fileList = stdout.trim().split('\n')
      fileList = fileList.map(function(file) {
        // Strip leading directory
        let path
        return (path = Path.relative(directory, file))
      })
      return callback(null, fileList)
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
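
For reference, a sketch of the same find invocation that _getAllFiles spawns, with an illustrative compile directory; the args array is exactly the one built above, so the stdout handling mirrors the module.

const { spawn } = require('child_process')

// equivalent shell command:
// find /tmp/compile-example ( -name .cache -o -name .archive -o -name .project-* ) -prune -o -type f -print
const dir = '/tmp/compile-example' // illustrative path
const proc = spawn('find', [
  dir, '(', '-name', '.cache', '-o', '-name', '.archive',
  '-o', '-name', '.project-*', ')', '-prune', '-o', '-type', 'f', '-print'
])
let out = ''
proc.stdout.on('data', chunk => (out += chunk.toString()))
proc.on('close', code => console.log(code, out.trim().split('\n')))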

View File

@@ -13,74 +13,92 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let OutputFileOptimiser
const fs = require('fs')
const Path = require('path')
const { spawn } = require('child_process')
const logger = require('logger-sharelatex')
const Metrics = require('./Metrics')
const _ = require('underscore')

module.exports = OutputFileOptimiser = {
  optimiseFile(src, dst, callback) {
    // check output file (src) and see if we can optimise it, storing
    // the result in the build directory (dst)
    if (callback == null) {
      callback = function(error) {}
    }
    if (src.match(/\/output\.pdf$/)) {
      return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
        err,
        isOptimised
      ) {
        if (err != null || isOptimised) {
          return callback(null)
        }
        return OutputFileOptimiser.optimisePDF(src, dst, callback)
      })
    } else {
      return callback(null)
    }
  },

  checkIfPDFIsOptimised(file, callback) {
    const SIZE = 16 * 1024 // check the header of the pdf
    const result = Buffer.alloc(SIZE) // zero-filled, prevents leakage of uninitialised memory
    return fs.open(file, 'r', function(err, fd) {
      if (err != null) {
        return callback(err)
      }
      return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
        fs.close(fd, function(errClose) {
          if (errRead != null) {
            return callback(errRead)
          }
          if (errClose != null) {
            return callback(errClose)
          }
          const isOptimised =
            buffer.toString('ascii').indexOf('/Linearized 1') >= 0
          return callback(null, isOptimised)
        })
      )
    })
  },

  optimisePDF(src, dst, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const tmpOutput = dst + '.opt'
    const args = ['--linearize', src, tmpOutput]
    logger.log({ args }, 'running qpdf command')

    const timer = new Metrics.Timer('qpdf')
    const proc = spawn('qpdf', args)
    let stdout = ''
    proc.stdout.on('data', chunk => (stdout += chunk.toString()))
    callback = _.once(callback) // avoid double call back for error and close event
    proc.on('error', function(err) {
      logger.warn({ err, args }, 'qpdf failed')
      return callback(null)
    }) // ignore the error
    return proc.on('close', function(code) {
      timer.done()
      if (code !== 0) {
        logger.warn({ code, args }, 'qpdf returned error')
        return callback(null) // ignore the error
      }
      return fs.rename(tmpOutput, dst, function(err) {
        if (err != null) {
          logger.warn(
            { tmpOutput, dst },
            'failed to rename output of qpdf command'
          )
        }
        return callback(null)
      })
    })
  } // ignore the error
}
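
A minimal promise-based sketch of the linearization check above: read the first 16KB of a PDF and look for the /Linearized 1 marker that qpdf --linearize writes. The file name is illustrative.

const fs = require('fs')

async function isLinearized(file) {
  const fh = await fs.promises.open(file, 'r')
  try {
    const buf = Buffer.alloc(16 * 1024) // zero-filled
    const { bytesRead } = await fh.read(buf, 0, buf.length, 0)
    return buf.toString('ascii', 0, bytesRead).includes('/Linearized 1')
  } finally {
    await fh.close() // always release the descriptor
  }
}

isLinearized('output.pdf').then(console.log) // true after `qpdf --linearize`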

View File

@@ -11,113 +11,153 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ProjectPersistenceManager
const UrlCache = require('./UrlCache')
const CompileManager = require('./CompileManager')
const db = require('./db')
const dbQueue = require('./DbQueue')
const async = require('async')
const logger = require('logger-sharelatex')
const oneDay = 24 * 60 * 60 * 1000
const Settings = require('settings-sharelatex')

module.exports = ProjectPersistenceManager = {
  EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5,

  markProjectAsJustAccessed(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.Project.findOrCreate({ where: { project_id } })
        .spread((project, created) =>
          project
            .updateAttributes({ lastAccessed: new Date() })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  clearExpiredProjects(callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ProjectPersistenceManager._findExpiredProjectIds(function(
      error,
      project_ids
    ) {
      if (error != null) {
        return callback(error)
      }
      logger.log({ project_ids }, 'clearing expired projects')
      const jobs = Array.from(project_ids || []).map(project_id =>
        (project_id => callback =>
          ProjectPersistenceManager.clearProjectFromCache(project_id, function(
            err
          ) {
            if (err != null) {
              logger.error({ err, project_id }, 'error clearing project')
            }
            return callback()
          }))(project_id)
      )
      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        return CompileManager.clearExpiredProjects(
          ProjectPersistenceManager.EXPIRY_TIMEOUT,
          error => callback()
        )
      })
    })
  }, // ignore any errors from deleting directories

  clearProject(project_id, user_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id, user_id }, 'clearing project for user')
    return CompileManager.clearProject(project_id, user_id, function(error) {
      if (error != null) {
        return callback(error)
      }
      return ProjectPersistenceManager.clearProjectFromCache(
        project_id,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return callback()
        }
      )
    })
  },

  clearProjectFromCache(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from cache')
    return UrlCache.clearProject(project_id, function(error) {
      if (error != null) {
        logger.err({ error, project_id }, 'error clearing project from cache')
        return callback(error)
      }
      return ProjectPersistenceManager._clearProjectFromDatabase(
        project_id,
        function(error) {
          if (error != null) {
            logger.err(
              { error, project_id },
              'error clearing project from database'
            )
          }
          return callback(error)
        }
      )
    })
  },

  _clearProjectFromDatabase(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    logger.log({ project_id }, 'clearing project from database')
    const job = cb =>
      db.Project.destroy({ where: { project_id } })
        .then(() => cb())
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },

  _findExpiredProjectIds(callback) {
    if (callback == null) {
      callback = function(error, project_ids) {}
    }
    const job = function(cb) {
      const keepProjectsFrom = new Date(
        Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
      )
      const q = {}
      q[db.op.lt] = keepProjectsFrom
      return db.Project.findAll({ where: { lastAccessed: q } })
        .then(projects =>
          cb(
            null,
            projects.map(project => project.project_id)
          )
        )
        .error(cb)
    }
    return dbQueue.queue.push(job, callback)
  }
}

logger.log(
  { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT },
  'project assets kept timeout'
)
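
A sketch of the expiry cutoff used by _findExpiredProjectIds, assuming db.op is Sequelize's Op and db.Project is the model exported from ./db; the query selects rows whose lastAccessed is older than the cutoff.

const db = require('./db')

const EXPIRY_TIMEOUT = 2.5 * 24 * 60 * 60 * 1000 // matches the default above
const keepProjectsFrom = new Date(Date.now() - EXPIRY_TIMEOUT)

// WHERE lastAccessed < keepProjectsFrom
db.Project.findAll({
  where: { lastAccessed: { [db.op.lt]: keepProjectsFrom } }
}).then(projects => console.log(projects.map(p => p.project_id)))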

View File

@@ -17,177 +17,201 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RequestParser
const settings = require('settings-sharelatex')

module.exports = RequestParser = {
  VALID_COMPILERS: ['pdflatex', 'latex', 'xelatex', 'lualatex'],
  MAX_TIMEOUT: 600,

  parse(body, callback) {
    let resource
    if (callback == null) {
      callback = function(error, data) {}
    }
    const response = {}

    if (body.compile == null) {
      return callback('top level object should have a compile attribute')
    }

    const { compile } = body
    if (!compile.options) {
      compile.options = {}
    }

    try {
      response.compiler = this._parseAttribute(
        'compiler',
        compile.options.compiler,
        {
          validValues: this.VALID_COMPILERS,
          default: 'pdflatex',
          type: 'string'
        }
      )
      response.timeout = this._parseAttribute(
        'timeout',
        compile.options.timeout,
        {
          default: RequestParser.MAX_TIMEOUT,
          type: 'number'
        }
      )
      response.imageName = this._parseAttribute(
        'imageName',
        compile.options.imageName,
        { type: 'string' }
      )
      response.draft = this._parseAttribute('draft', compile.options.draft, {
        default: false,
        type: 'boolean'
      })
      response.check = this._parseAttribute('check', compile.options.check, {
        type: 'string'
      })
      response.flags = this._parseAttribute('flags', compile.options.flags, {
        default: [],
        type: 'object'
      })

      // The syncType specifies whether the request contains all
      // resources (full) or only those resources to be updated
      // in-place (incremental).
      response.syncType = this._parseAttribute(
        'syncType',
        compile.options.syncType,
        {
          validValues: ['full', 'incremental'],
          type: 'string'
        }
      )

      // The syncState is an identifier passed in with the request
      // which has the property that it changes when any resource is
      // added, deleted, moved or renamed.
      //
      // on syncType full the syncState identifier is passed in and
      // stored
      //
      // on syncType incremental the syncState identifier must match
      // the stored value
      response.syncState = this._parseAttribute(
        'syncState',
        compile.options.syncState,
        { type: 'string' }
      )

      if (response.timeout > RequestParser.MAX_TIMEOUT) {
        response.timeout = RequestParser.MAX_TIMEOUT
      }
      response.timeout = response.timeout * 1000 // milliseconds

      response.resources = (() => {
        const result = []
        for (resource of Array.from(compile.resources || [])) {
          result.push(this._parseResource(resource))
        }
        return result
      })()

      const rootResourcePath = this._parseAttribute(
        'rootResourcePath',
        compile.rootResourcePath,
        {
          default: 'main.tex',
          type: 'string'
        }
      )
      const originalRootResourcePath = rootResourcePath
      const sanitizedRootResourcePath = RequestParser._sanitizePath(
        rootResourcePath
      )
      response.rootResourcePath = RequestParser._checkPath(
        sanitizedRootResourcePath
      )

      for (resource of Array.from(response.resources)) {
        if (resource.path === originalRootResourcePath) {
          resource.path = sanitizedRootResourcePath
        }
      }
    } catch (error1) {
      const error = error1
      return callback(error)
    }

    return callback(null, response)
  },

  _parseResource(resource) {
    let modified
    if (resource.path == null || typeof resource.path !== 'string') {
      throw 'all resources should have a path attribute'
    }

    if (resource.modified != null) {
      modified = new Date(resource.modified)
      if (isNaN(modified.getTime())) {
        throw `resource modified date could not be understood: ${resource.modified}`
      }
    }

    if (resource.url == null && resource.content == null) {
      throw 'all resources should have either a url or content attribute'
    }
    if (resource.content != null && typeof resource.content !== 'string') {
      throw 'content attribute should be a string'
    }
    if (resource.url != null && typeof resource.url !== 'string') {
      throw 'url attribute should be a string'
    }

    return {
      path: resource.path,
      modified,
      url: resource.url,
      content: resource.content
    }
  },

  _parseAttribute(name, attribute, options) {
    if (attribute != null) {
      if (options.validValues != null) {
        if (options.validValues.indexOf(attribute) === -1) {
          throw `${name} attribute should be one of: ${options.validValues.join(
            ', '
          )}`
        }
      }
      if (options.type != null) {
        if (typeof attribute !== options.type) {
          throw `${name} attribute should be a ${options.type}`
        }
      }
    } else {
      if (options.default != null) {
        return options.default
      }
    }
    return attribute
  },

  _sanitizePath(path) {
    // See http://php.net/manual/en/function.escapeshellcmd.php
    return path.replace(
      /[\#\&\;\`\|\*\?\~\<\>\^\(\)\[\]\{\}\$\\\x0A\xFF\x00]/g,
      ''
    )
  },

  _checkPath(path) {
    // check that the request does not use a relative path
    for (const dir of Array.from(path.split('/'))) {
      if (dir === '..') {
        throw 'relative path in root resource'
      }
    }
    return path
  }
}
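
An illustrative request body that RequestParser.parse accepts, showing the shape the validation above expects; values are examples only.

const RequestParser = require('./RequestParser')

const body = {
  compile: {
    options: { compiler: 'pdflatex', timeout: 60, syncType: 'full' },
    rootResourcePath: 'main.tex',
    resources: [
      {
        path: 'main.tex',
        content: '\\documentclass{article}\\begin{document}ok\\end{document}'
      }
    ]
  }
}

RequestParser.parse(body, (error, request) => {
  // request.compiler is 'pdflatex'; request.timeout is now 60000 (ms)
  console.log(error, request)
})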

View File

@@ -13,102 +13,142 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceStateManager
const Path = require('path')
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const Errors = require('./Errors')
const SafeReader = require('./SafeReader')

module.exports = ResourceStateManager = {
  // The sync state is an identifier which must match for an
  // incremental update to be allowed.
  //
  // The initial value is passed in and stored on a full
  // compile, along with the list of resources..
  //
  // Subsequent incremental compiles must come with the same value - if
  // not they will be rejected with a 409 Conflict response. The
  // previous list of resources is returned.
  //
  // An incremental compile can only update existing files with new
  // content. The sync state identifier must change if any docs or
  // files are moved, added, deleted or renamed.

  SYNC_STATE_FILE: '.project-sync-state',
  SYNC_STATE_MAX_SIZE: 128 * 1024,

  saveProjectState(state, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    if (state == null) {
      // remove the file if no state passed in
      logger.log({ state, basePath }, 'clearing sync state')
      return fs.unlink(stateFile, function(err) {
        if (err != null && err.code !== 'ENOENT') {
          return callback(err)
        } else {
          return callback()
        }
      })
    } else {
      logger.log({ state, basePath }, 'writing sync state')
      const resourceList = Array.from(resources).map(resource => resource.path)
      return fs.writeFile(
        stateFile,
        [...Array.from(resourceList), `stateHash:${state}`].join('\n'),
        callback
      )
    }
  },

  checkProjectStateMatches(state, basePath, callback) {
    if (callback == null) {
      callback = function(error, resources) {}
    }
    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
    const size = this.SYNC_STATE_MAX_SIZE
    return SafeReader.readFile(stateFile, size, 'utf8', function(
      err,
      result,
      bytesRead
    ) {
      if (err != null) {
        return callback(err)
      }
      if (bytesRead === size) {
        logger.error(
          { file: stateFile, size, bytesRead },
          'project state file truncated'
        )
      }
      const array =
        __guard__(result != null ? result.toString() : undefined, x =>
          x.split('\n')
        ) || []
      const adjustedLength = Math.max(array.length, 1)
      const resourceList = array.slice(0, adjustedLength - 1)
      const oldState = array[adjustedLength - 1]
      const newState = `stateHash:${state}`
      logger.log(
        { state, oldState, basePath, stateMatches: newState === oldState },
        'checking sync state'
      )
      if (newState !== oldState) {
        return callback(
          new Errors.FilesOutOfSyncError('invalid state for incremental update')
        )
      } else {
        const resources = Array.from(resourceList).map(path => ({ path }))
        return callback(null, resources)
      }
    })
  },

  checkResourceFiles(resources, allFiles, basePath, callback) {
    // check the paths are all relative to current directory
    let file
    if (callback == null) {
      callback = function(error) {}
    }
    for (file of Array.from(resources || [])) {
      for (const dir of Array.from(
        __guard__(file != null ? file.path : undefined, x => x.split('/'))
      )) {
        if (dir === '..') {
          return callback(new Error('relative path in resource file list'))
        }
      }
    }
    // check if any of the input files are not present in list of files
    const seenFile = {}
    for (file of Array.from(allFiles)) {
      seenFile[file] = true
    }
    const missingFiles = Array.from(resources)
      .filter(resource => !seenFile[resource.path])
      .map(resource => resource.path)
    if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
      logger.err(
        { missingFiles, basePath, allFiles, resources },
        'missing input files for project'
      )
      return callback(
        new Errors.FilesOutOfSyncError(
          'resource files missing in incremental update'
        )
      )
    } else {
      return callback()
    }
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
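
For reference, the .project-sync-state file written by saveProjectState is a plain list of resource paths with the state hash on the last line, matching the join('\n') above. Paths and hash value here are illustrative:

main.tex
chapters/introduction.tex
images/logo.png
stateHash:abcdef123456

checkProjectStateMatches splits on newlines, treats the final line as the stored hash, and returns the preceding lines as the known resource list.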

View File

@@ -14,202 +14,339 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ResourceWriter
const UrlCache = require('./UrlCache')
const Path = require('path')
const fs = require('fs')
const async = require('async')
const mkdirp = require('mkdirp')
const OutputFileFinder = require('./OutputFileFinder')
const ResourceStateManager = require('./ResourceStateManager')
const Metrics = require('./Metrics')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')

const parallelFileDownloads = settings.parallelFileDownloads || 1

module.exports = ResourceWriter = {
  syncResourcesToDisk(request, basePath, callback) {
    if (callback == null) {
      callback = function(error, resourceList) {}
    }
    if (request.syncType === 'incremental') {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'incremental sync'
      )
      return ResourceStateManager.checkProjectStateMatches(
        request.syncState,
        basePath,
        function(error, resourceList) {
          if (error != null) {
            return callback(error)
          }
          return ResourceWriter._removeExtraneousFiles(
            resourceList,
            basePath,
            function(error, outputFiles, allFiles) {
              if (error != null) {
                return callback(error)
              }
              return ResourceStateManager.checkResourceFiles(
                resourceList,
                allFiles,
                basePath,
                function(error) {
                  if (error != null) {
                    return callback(error)
                  }
                  return ResourceWriter.saveIncrementalResourcesToDisk(
                    request.project_id,
                    request.resources,
                    basePath,
                    function(error) {
                      if (error != null) {
                        return callback(error)
                      }
                      return callback(null, resourceList)
                    }
                  )
                }
              )
            }
          )
        }
      )
    } else {
      logger.log(
        { project_id: request.project_id, user_id: request.user_id },
        'full sync'
      )
      return this.saveAllResourcesToDisk(
        request.project_id,
        request.resources,
        basePath,
        function(error) {
          if (error != null) {
            return callback(error)
          }
          return ResourceStateManager.saveProjectState(
            request.syncState,
            request.resources,
            basePath,
            function(error) {
              if (error != null) {
                return callback(error)
              }
              return callback(null, request.resources)
            }
          )
        }
      )
    }
  },

  saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(resources).map(resource =>
        (resource => {
          return callback =>
            this._writeResourceToDisk(project_id, resource, basePath, callback)
        })(resource)
      )
      return async.parallelLimit(jobs, parallelFileDownloads, callback)
    })
  },

  saveAllResourcesToDisk(project_id, resources, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return this._createDirectory(basePath, error => {
      if (error != null) {
        return callback(error)
      }
      return this._removeExtraneousFiles(resources, basePath, error => {
        if (error != null) {
          return callback(error)
        }
        const jobs = Array.from(resources).map(resource =>
          (resource => {
            return callback =>
              this._writeResourceToDisk(
                project_id,
                resource,
                basePath,
                callback
              )
          })(resource)
        )
        return async.parallelLimit(jobs, parallelFileDownloads, callback)
      })
    })
  },

  _createDirectory(basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.mkdir(basePath, function(err) {
      if (err != null) {
        if (err.code === 'EEXIST') {
          return callback()
        } else {
          logger.log({ err, dir: basePath }, 'error creating directory')
          return callback(err)
        }
      } else {
        return callback()
      }
    })
  },

  _removeExtraneousFiles(resources, basePath, _callback) {
    if (_callback == null) {
      _callback = function(error, outputFiles, allFiles) {}
    }
    const timer = new Metrics.Timer('unlink-output-files')
    const callback = function(error, ...result) {
      timer.done()
      return _callback(error, ...Array.from(result))
    }

    return OutputFileFinder.findOutputFiles(resources, basePath, function(
      error,
      outputFiles,
      allFiles
    ) {
      if (error != null) {
        return callback(error)
      }

      const jobs = []
      for (const file of Array.from(outputFiles || [])) {
        ;(function(file) {
          const { path } = file
          let should_delete = true
          if (
            path.match(/^output\./) ||
            path.match(/\.aux$/) ||
            path.match(/^cache\//)
          ) {
            // knitr cache
            should_delete = false
          }
          if (path.match(/^output-.*/)) {
            // Tikz cached figures (default case)
            should_delete = false
          }
          if (path.match(/\.(pdf|dpth|md5)$/)) {
            // Tikz cached figures (by extension)
            should_delete = false
          }
          if (
            path.match(/\.(pygtex|pygstyle)$/) ||
            path.match(/(^|\/)_minted-[^\/]+\//)
          ) {
            // minted files/directory
            should_delete = false
          }
          if (
            path.match(/\.md\.tex$/) ||
            path.match(/(^|\/)_markdown_[^\/]+\//)
          ) {
            // markdown files/directory
            should_delete = false
          }
          if (path.match(/-eps-converted-to\.pdf$/)) {
            // Epstopdf generated files
            should_delete = false
          }
          if (
            path === 'output.pdf' ||
            path === 'output.dvi' ||
            path === 'output.log' ||
            path === 'output.xdv'
          ) {
            should_delete = true
          }
          if (path === 'output.tex') {
            // created by TikzManager if present in output files
            should_delete = true
          }
          if (should_delete) {
            return jobs.push(callback =>
              ResourceWriter._deleteFileIfNotDirectory(
                Path.join(basePath, path),
                callback
              )
            )
          }
        })(file)
      }

      return async.series(jobs, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null, outputFiles, allFiles)
      })
    })
  },

  _deleteFileIfNotDirectory(path, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.stat(path, function(error, stat) {
      if (error != null && error.code === 'ENOENT') {
        return callback()
      } else if (error != null) {
        logger.err(
          { err: error, path },
          'error stating file in deleteFileIfNotDirectory'
        )
        return callback(error)
      } else if (stat.isFile()) {
        return fs.unlink(path, function(error) {
          if (error != null) {
            logger.err(
              { err: error, path },
              'error removing file in deleteFileIfNotDirectory'
            )
            return callback(error)
          } else {
            return callback()
          }
        })
      } else {
        return callback()
      }
    })
  },

  _writeResourceToDisk(project_id, resource, basePath, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(basePath, resource.path, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return mkdirp(Path.dirname(path), function(error) {
        if (error != null) {
          return callback(error)
        }
        // TODO: Don't overwrite file if it hasn't been modified
        if (resource.url != null) {
          return UrlCache.downloadUrlToFile(
            project_id,
            resource.url,
            path,
            resource.modified,
            function(err) {
              if (err != null) {
                logger.err(
                  {
                    err,
                    project_id,
                    path,
                    resource_url: resource.url,
                    modified: resource.modified
                  },
                  'error downloading file for resources'
                )
              }
              return callback()
            }
          ) // try and continue compiling even if http resource can not be downloaded at this time
        } else {
          return fs.writeFile(path, resource.content, callback)
        }
      })
    })
  },

  checkPath(basePath, resourcePath, callback) {
    const path = Path.normalize(Path.join(basePath, resourcePath))
    if (path.slice(0, basePath.length + 1) !== basePath + '/') {
      return callback(new Error('resource path is outside root directory'))
    } else {
      return callback(null, path)
    }
  }
}
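
A quick usage sketch of the checkPath traversal guard above; the base path and file names are illustrative. Paths that normalize to somewhere outside the compile directory are rejected.

const ResourceWriter = require('./ResourceWriter')

ResourceWriter.checkPath('/compile/proj', 'chapters/intro.tex', (err, path) =>
  console.log(err, path) // null '/compile/proj/chapters/intro.tex'
)
ResourceWriter.checkPath('/compile/proj', '../escape.tex', (err, path) =>
  console.log(err && err.message) // 'resource path is outside root directory'
)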

View File

@@ -12,36 +12,49 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let SafeReader
const fs = require('fs')
const logger = require('logger-sharelatex')

module.exports = SafeReader = {
  // safely read up to size bytes from a file and return result as a
  // string

  readFile(file, size, encoding, callback) {
    if (callback == null) {
      callback = function(error, result) {}
    }
    return fs.open(file, 'r', function(err, fd) {
      if (err != null && err.code === 'ENOENT') {
        return callback()
      }
      if (err != null) {
        return callback(err)
      }

      // safely return always closing the file
      const callbackWithClose = (err, ...result) =>
        fs.close(fd, function(err1) {
          if (err != null) {
            return callback(err)
          }
          if (err1 != null) {
            return callback(err1)
          }
          return callback(null, ...Array.from(result))
        })

      const buff = Buffer.alloc(size) // zero-filled, avoids leaking uninitialised memory
      return fs.read(fd, buff, 0, buff.length, 0, function(
        err,
        bytesRead,
        buffer
      ) {
        if (err != null) {
          return callbackWithClose(err)
        }
        const result = buffer.toString(encoding, 0, bytesRead)
        return callbackWithClose(null, result, bytesRead)
      })
    })
  }
}
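
A short usage sketch of SafeReader.readFile: cap the read at a fixed size so a hostile or corrupted file cannot exhaust memory, and detect truncation when bytesRead equals the cap. The file name is illustrative.

const SafeReader = require('./SafeReader')

SafeReader.readFile('.project-sync-state', 128 * 1024, 'utf8', function(
  err,
  result,
  bytesRead
) {
  if (err) return console.error(err)
  // bytesRead === 128 * 1024 would indicate the file was truncated
  console.log(bytesRead, result && result.split('\n').length)
})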

View File

@@ -14,59 +14,81 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let ForbidSymlinks
const Path = require('path')
const fs = require('fs')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')

module.exports = ForbidSymlinks = function(staticFn, root, options) {
  const expressStatic = staticFn(root, options)
  const basePath = Path.resolve(root)
  return function(req, res, next) {
    let file, project_id, result
    const path = __guard__(url.parse(req.url), x => x.pathname)
    // check that the path is of the form /project_id_or_name/path/to/file.log
    if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
      project_id = result[1]
      file = result[2]
    } else {
      logger.warn({ path }, 'unrecognized file request')
      return res.sendStatus(404)
    }
    // check that the file does not use a relative path
    for (const dir of Array.from(file.split('/'))) {
      if (dir === '..') {
        logger.warn({ path }, 'attempt to use a relative path')
        return res.sendStatus(404)
      }
    }
    // check that the requested path is normalized
    const requestedFsPath = `${basePath}/${project_id}/${file}`
    if (requestedFsPath !== Path.normalize(requestedFsPath)) {
      logger.error(
        { path: requestedFsPath },
        'requestedFsPath is not normalized'
      )
      return res.sendStatus(404)
    }
    // check that the requested path is not a symlink
    return fs.realpath(requestedFsPath, function(err, realFsPath) {
      if (err != null) {
        if (err.code === 'ENOENT') {
          return res.sendStatus(404)
        } else {
          logger.error(
            {
              err,
              requestedFsPath,
              realFsPath,
              path: req.params[0],
              project_id: req.params.project_id
            },
            'error checking file access'
          )
          return res.sendStatus(500)
        }
      } else if (requestedFsPath !== realFsPath) {
        logger.warn(
          {
            requestedFsPath,
            realFsPath,
            path: req.params[0],
            project_id: req.params.project_id
          },
          'trying to access a different file (symlink), aborting'
        )
        return res.sendStatus(404)
      } else {
        return expressStatic(req, res, next)
      }
    })
  }
}

function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}
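
A minimal sketch of wiring this middleware into an express app, assuming the stock express.static handler; the compiles directory and port are illustrative. fs.realpath resolves any symlink, so a request whose resolved path differs from the requested one is refused before the static handler runs.

const express = require('express')
const ForbidSymlinks = require('./StaticServerForbidSymlinks')

const app = express()
// serve compile output, refusing anything reached via a symlink
app.use(ForbidSymlinks(express.static, '/var/clsi/compiles'))
app.listen(3013)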

View File

@@ -11,52 +11,84 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let TikzManager
const fs = require('fs')
const Path = require('path')
const ResourceWriter = require('./ResourceWriter')
const SafeReader = require('./SafeReader')
const logger = require('logger-sharelatex')
// for \tikzexternalize or pstool to work the main file needs to match the
// jobname. Since we set the -jobname to output, we have to create a
// copy of the main file as 'output.tex'.
module.exports = TikzManager = {
  checkMainFile(compileDir, mainFile, resources, callback) {
    // if there's already an output.tex file, we don't want to touch it
    if (callback == null) {
      callback = function(error, needsMainFile) {}
    }
    for (const resource of Array.from(resources)) {
      if (resource.path === 'output.tex') {
        logger.log({ compileDir, mainFile }, 'output.tex already in resources')
        return callback(null, false)
      }
    }
    // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        const usesTikzExternalize =
          (content != null
            ? content.indexOf('\\tikzexternalize')
            : undefined) >= 0
        const usesPsTool =
          (content != null ? content.indexOf('{pstool}') : undefined) >= 0
        logger.log(
          { compileDir, mainFile, usesTikzExternalize, usesPsTool },
          'checked for packages needing main file as output.tex'
        )
        const needsMainFile = usesTikzExternalize || usesPsTool
        return callback(null, needsMainFile)
      })
    })
  },
  injectOutputFile(compileDir, mainFile, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return ResourceWriter.checkPath(compileDir, mainFile, function(
      error,
      path
    ) {
      if (error != null) {
        return callback(error)
      }
      return fs.readFile(path, 'utf8', function(error, content) {
        if (error != null) {
          return callback(error)
        }
        logger.log(
          { compileDir, mainFile },
          'copied file to output.tex as project uses packages which require it'
        )
        // use wx flag to ensure that output file does not already exist
        return fs.writeFile(
          Path.join(compileDir, 'output.tex'),
          content,
          { flag: 'wx' },
          callback
        )
      })
    })
  }
}
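The 'wx' flag on the final fs.writeFile is an exclusive-create open: the write fails with EEXIST if output.tex appeared in the meantime, so injectOutputFile can never clobber a file it raced against. A small hypothetical demonstration of that flag, assuming a writable /tmp (the path is just an example):

const fs = require('fs')

fs.writeFile('/tmp/output.tex', 'first', { flag: 'wx' }, function(err) {
  // err is null on a clean run; the file is created exclusively
  fs.writeFile('/tmp/output.tex', 'second', { flag: 'wx' }, function(err) {
    console.log(err.code) // 'EEXIST' - the existing file is left untouched
  })
})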

View File

@@ -12,185 +12,267 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlCache
const db = require('./db')
const dbQueue = require('./DbQueue')
const UrlFetcher = require('./UrlFetcher')
const Settings = require('settings-sharelatex')
const crypto = require('crypto')
const fs = require('fs')
const logger = require('logger-sharelatex')
const async = require('async')
module.exports = UrlCache = {
  downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._ensureUrlIsInCache(
      project_id,
      url,
      lastModified,
      (error, pathToCachedUrl) => {
        if (error != null) {
          return callback(error)
        }
        return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
          if (error != null) {
            return UrlCache._clearUrlDetails(project_id, url, () =>
              callback(error)
            )
          } else {
            return callback(error)
          }
        })
      }
    )
  },
  clearProject(project_id, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
      logger.log(
        { project_id, url_count: urls.length },
        'clearing project URLs'
      )
      if (error != null) {
        return callback(error)
      }
      const jobs = Array.from(urls || []).map(url =>
        (url => callback =>
          UrlCache._clearUrlFromCache(project_id, url, function(error) {
            if (error != null) {
              logger.error(
                { err: error, project_id, url },
                'error clearing project URL'
              )
            }
            return callback()
          }))(url)
      )
      return async.series(jobs, callback)
    })
  },
  _ensureUrlIsInCache(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, pathOnDisk) {}
    }
    if (lastModified != null) {
      // MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
      // So round down to seconds
      lastModified = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
    }
    return UrlCache._doesUrlNeedDownloading(
      project_id,
      url,
      lastModified,
      (error, needsDownloading) => {
        if (error != null) {
          return callback(error)
        }
        if (needsDownloading) {
          logger.log({ url, lastModified }, 'downloading URL')
          return UrlFetcher.pipeUrlToFile(
            url,
            UrlCache._cacheFilePathForUrl(project_id, url),
            error => {
              if (error != null) {
                return callback(error)
              }
              return UrlCache._updateOrCreateUrlDetails(
                project_id,
                url,
                lastModified,
                error => {
                  if (error != null) {
                    return callback(error)
                  }
                  return callback(
                    null,
                    UrlCache._cacheFilePathForUrl(project_id, url)
                  )
                }
              )
            }
          )
        } else {
          logger.log({ url, lastModified }, 'URL is up to date in cache')
          return callback(null, UrlCache._cacheFilePathForUrl(project_id, url))
        }
      }
    )
  },
  _doesUrlNeedDownloading(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error, needsDownloading) {}
    }
    if (lastModified == null) {
      return callback(null, true)
    }
    return UrlCache._findUrlDetails(project_id, url, function(
      error,
      urlDetails
    ) {
      if (error != null) {
        return callback(error)
      }
      if (
        urlDetails == null ||
        urlDetails.lastModified == null ||
        urlDetails.lastModified.getTime() < lastModified.getTime()
      ) {
        return callback(null, true)
      } else {
        return callback(null, false)
      }
    })
  },
  _cacheFileNameForUrl(project_id, url) {
    return (
      project_id +
      ':' +
      crypto
        .createHash('md5')
        .update(url)
        .digest('hex')
    )
  },
  _cacheFilePathForUrl(project_id, url) {
    return `${Settings.path.clsiCacheDir}/${UrlCache._cacheFileNameForUrl(
      project_id,
      url
    )}`
  },
  _copyFile(from, to, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (error != null) {
        logger.error({ err: error, from, to }, 'error copying file from cache')
      }
      _callback(error)
      return (_callback = function() {})
    }
    const writeStream = fs.createWriteStream(to)
    const readStream = fs.createReadStream(from)
    writeStream.on('error', callbackOnce)
    readStream.on('error', callbackOnce)
    writeStream.on('close', callbackOnce)
    return writeStream.on('open', () => readStream.pipe(writeStream))
  },
  _clearUrlFromCache(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return UrlCache._clearUrlDetails(project_id, url, function(error) {
      if (error != null) {
        return callback(error)
      }
      return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
        if (error != null) {
          return callback(error)
        }
        return callback(null)
      })
    })
  },
  _deleteUrlCacheFromDisk(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
      error
    ) {
      if (error != null && error.code !== 'ENOENT') {
        // no error if the file isn't present
        return callback(error)
      } else {
        return callback()
      }
    })
  },
  _findUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error, urlDetails) {}
    }
    const job = cb =>
      db.UrlCache.find({ where: { url, project_id } })
        .then(urlDetails => cb(null, urlDetails))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.findOrCreate({ where: { url, project_id } })
        .spread((urlDetails, created) =>
          urlDetails
            .updateAttributes({ lastModified })
            .then(() => cb())
            .error(cb)
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _clearUrlDetails(project_id, url, callback) {
    if (callback == null) {
      callback = function(error) {}
    }
    const job = cb =>
      db.UrlCache.destroy({ where: { url, project_id } })
        .then(() => cb(null))
        .error(cb)
    return dbQueue.queue.push(job, callback)
  },
  _findAllUrlsInProject(project_id, callback) {
    if (callback == null) {
      callback = function(error, urls) {}
    }
    const job = cb =>
      db.UrlCache.findAll({ where: { project_id } })
        .then(urlEntries =>
          cb(
            null,
            urlEntries.map(entry => entry.url)
          )
        )
        .error(cb)
    return dbQueue.queue.push(job, callback)
  }
}
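One detail worth calling out is the seconds rounding in _ensureUrlIsInCache: MySQL DATETIME columns drop milliseconds, so an incoming lastModified is floored to a whole second before being stored or compared; otherwise every request would look newer than the cached row and force a re-download. The trick in isolation (the timestamp is just an example):

// round a Date down to one-second resolution, as _ensureUrlIsInCache does
const lastModified = new Date('2020-02-19T12:14:37.654Z')
const rounded = new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
console.log(rounded.toISOString()) // 2020-02-19T12:14:37.000Z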

View File

@@ -12,85 +12,109 @@
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let UrlFetcher
const request = require('request').defaults({ jar: false })
const fs = require('fs')
const logger = require('logger-sharelatex')
const settings = require('settings-sharelatex')
const URL = require('url')
const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
  pipeUrlToFile(url, filePath, _callback) {
    if (_callback == null) {
      _callback = function(error) {}
    }
    const callbackOnce = function(error) {
      if (timeoutHandler != null) {
        clearTimeout(timeoutHandler)
      }
      _callback(error)
      return (_callback = function() {})
    }
    if (settings.filestoreDomainOveride != null) {
      const p = URL.parse(url).path
      url = `${settings.filestoreDomainOveride}${p}`
    }
    var timeoutHandler = setTimeout(
      function() {
        timeoutHandler = null
        logger.error({ url, filePath }, 'Timed out downloading file to cache')
        return callbackOnce(
          new Error(`Timed out downloading file to cache ${url}`)
        )
      },
      // FIXME: maybe need to close fileStream here
      3 * oneMinute
    )
    logger.log({ url, filePath }, 'started downloading url to cache')
    const urlStream = request.get({ url, timeout: oneMinute })
    urlStream.pause() // stop data flowing until we are ready
    // attach handlers before setting up pipes
    urlStream.on('error', function(error) {
      logger.error({ err: error, url, filePath }, 'error downloading url')
      return callbackOnce(
        error || new Error(`Something went wrong downloading the URL ${url}`)
      )
    })
    urlStream.on('end', () =>
      logger.log({ url, filePath }, 'finished downloading file into cache')
    )
    return urlStream.on('response', function(res) {
      if (res.statusCode >= 200 && res.statusCode < 300) {
        const fileStream = fs.createWriteStream(filePath)
        // attach handlers before setting up pipes
        fileStream.on('error', function(error) {
          logger.error(
            { err: error, url, filePath },
            'error writing file into cache'
          )
          return fs.unlink(filePath, function(err) {
            if (err != null) {
              logger.err({ err, filePath }, 'error deleting file from cache')
            }
            return callbackOnce(error)
          })
        })
        fileStream.on('finish', function() {
          logger.log({ url, filePath }, 'finished writing file into cache')
          return callbackOnce()
        })
        fileStream.on('pipe', () =>
          logger.log({ url, filePath }, 'piping into filestream')
        )
        urlStream.pipe(fileStream)
        return urlStream.resume() // now we are ready to handle the data
      } else {
        logger.error(
          { statusCode: res.statusCode, url, filePath },
          'unexpected status code downloading url to cache'
        )
        // https://nodejs.org/api/http.html#http_class_http_clientrequest
        // If you add a 'response' event handler, then you must consume
        // the data from the response object, either by calling
        // response.read() whenever there is a 'readable' event, or by
        // adding a 'data' handler, or by calling the .resume()
        // method. Until the data is consumed, the 'end' event will not
        // fire. Also, until the data is read it will consume memory
        // that can eventually lead to a 'process out of memory' error.
        urlStream.resume() // discard the data
        return callbackOnce(
          new Error(
            `URL returned non-success status code: ${res.statusCode} ${url}`
          )
        )
      }
    })
  }
}
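pipeUrlToFile wires the same callbackOnce wrapper to error, finish and timeout paths across two different streams, so whichever event fires first delivers the result and every later call is swallowed by replacing the callback with a no-op. The pattern on its own, with illustrative names:

// call-at-most-once wrapper, as used by callbackOnce above
function once(_callback) {
  return function(error) {
    _callback(error)
    _callback = function() {} // later calls hit this no-op instead
  }
}

const done = once(err => console.log('finished:', err || 'ok'))
done() // logs 'finished: ok'
done(new Error('too late')) // silently ignored

This matters here because a stream can emit 'error' and 'close' for the same failure, and the timeout can fire after a success; without the guard the caller would be invoked twice.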

View File

@@ -8,57 +8,60 @@
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const Sequelize = require('sequelize')
const Settings = require('settings-sharelatex')
const _ = require('underscore')
const logger = require('logger-sharelatex')
const options = _.extend({ logging: false }, Settings.mysql.clsi)
logger.log({ dbPath: Settings.mysql.clsi.storage }, 'connecting to db')
const sequelize = new Sequelize(
  Settings.mysql.clsi.database,
  Settings.mysql.clsi.username,
  Settings.mysql.clsi.password,
  options
)
if (Settings.mysql.clsi.dialect === 'sqlite') {
  logger.log('running PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA journal_mode=WAL;')
  sequelize.query('PRAGMA synchronous=OFF;')
  sequelize.query('PRAGMA read_uncommitted = true;')
}
module.exports = {
  UrlCache: sequelize.define(
    'UrlCache',
    {
      url: Sequelize.STRING,
      project_id: Sequelize.STRING,
      lastModified: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['url', 'project_id'] }, { fields: ['project_id'] }]
    }
  ),
  Project: sequelize.define(
    'Project',
    {
      project_id: { type: Sequelize.STRING, primaryKey: true },
      lastAccessed: Sequelize.DATE
    },
    {
      indexes: [{ fields: ['lastAccessed'] }]
    }
  ),
  op: Sequelize.Op,
  sync() {
    logger.log({ dbPath: Settings.mysql.clsi.storage }, 'syncing db schema')
    return sequelize
      .sync()
      .then(() => logger.log('db sync complete'))
      .catch(err => console.log(err, 'error syncing'))
  }
}
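The WAL and synchronous=OFF pragmas apply only when the sqlite dialect is configured; they trade durability for write throughput on these cache tables, which is acceptable since the data can be rebuilt. The exported sync() helper is presumably invoked once at startup to create missing tables; a hypothetical boot-time usage, assuming the caller sits next to this module as ./db:

// sketch: create UrlCache and Project tables if absent, then start serving
const db = require('./db')

db.sync()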