[misc] bump the dev-env to 3.3.2

Jakob Ackermann
2020-08-10 17:01:11 +01:00
parent 1ee48d0274
commit f4561c2fe2
66 changed files with 1371 additions and 1458 deletions

View File

@@ -24,10 +24,10 @@ const Errors = require('./Errors')
module.exports = CompileController = {
compile(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
const timer = new Metrics.Timer('compile-request')
return RequestParser.parse(req.body, function(error, request) {
return RequestParser.parse(req.body, function (error, request) {
if (error != null) {
return next(error)
}
@@ -37,11 +37,11 @@ module.exports = CompileController = {
}
return ProjectPersistenceManager.markProjectAsJustAccessed(
request.project_id,
function(error) {
function (error) {
if (error != null) {
return next(error)
}
return CompileManager.doCompileWithLock(request, function(
return CompileManager.doCompileWithLock(request, function (
error,
outputFiles
) {
@@ -116,7 +116,7 @@ module.exports = CompileController = {
compile: {
status,
error: (error != null ? error.message : undefined) || error,
outputFiles: outputFiles.map(file => ({
outputFiles: outputFiles.map((file) => ({
url:
`${Settings.apis.clsi.url}/project/${request.project_id}` +
(request.user_id != null
@@ -138,7 +138,7 @@ module.exports = CompileController = {
stopCompile(req, res, next) {
const { project_id, user_id } = req.params
return CompileManager.stopCompile(project_id, user_id, function(error) {
return CompileManager.stopCompile(project_id, user_id, function (error) {
if (error != null) {
return next(error)
}
@@ -148,12 +148,12 @@ module.exports = CompileController = {
clearCache(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
return ProjectPersistenceManager.clearProject(
req.params.project_id,
req.params.user_id,
function(error) {
function (error) {
if (error != null) {
return next(error)
}
@@ -164,7 +164,7 @@ module.exports = CompileController = {
syncFromCode(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
const { file } = req.query
const line = parseInt(req.query.line, 10)
@@ -177,7 +177,7 @@ module.exports = CompileController = {
file,
line,
column,
function(error, pdfPositions) {
function (error, pdfPositions) {
if (error != null) {
return next(error)
}
@@ -190,29 +190,33 @@ module.exports = CompileController = {
syncFromPdf(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
const page = parseInt(req.query.page, 10)
const h = parseFloat(req.query.h)
const v = parseFloat(req.query.v)
const { project_id } = req.params
const { user_id } = req.params
return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
error,
codePositions
) {
if (error != null) {
return next(error)
return CompileManager.syncFromPdf(
project_id,
user_id,
page,
h,
v,
function (error, codePositions) {
if (error != null) {
return next(error)
}
return res.json({
code: codePositions
})
}
return res.json({
code: codePositions
})
})
)
},
wordcount(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
const file = req.query.file || 'main.tex'
const { project_id } = req.params
@@ -229,7 +233,7 @@ module.exports = CompileController = {
}
logger.log({ image, file, project_id }, 'word count request')
return CompileManager.wordcount(project_id, user_id, file, image, function(
return CompileManager.wordcount(project_id, user_id, file, image, function (
error,
result
) {
@@ -244,7 +248,7 @@ module.exports = CompileController = {
status(req, res, next) {
if (next == null) {
next = function(error) {}
next = function (error) {}
}
return res.send('OK')
}
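
Every hunk in this commit is the same mechanical reformat: a space after the anonymous function keyword and parentheses around single arrow-function parameters. Those are the Prettier 2.x defaults (arrowParens changed from "avoid" to "always" in 2.0), which is presumably what the dev-env 3.3.2 bump pulls in; the formatter version itself is not visible in these hunks. A minimal before/after sketch:

const files = [{ path: 'output.pdf' }, { path: 'output.log' }]

// Prettier 1.x style (arrowParens: "avoid", no space after anonymous `function`)
const onErrorOld = function(error) {}
const pathsOld = files.map(file => file.path)

// Prettier 2.x style (arrowParens: "always", space after anonymous `function`)
const onErrorNew = function (error) {}
const pathsNew = files.map((file) => file.path)

console.log(pathsOld, pathsNew) // identical output; only the source formatting changed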

View File

@@ -35,7 +35,7 @@ const async = require('async')
const Errors = require('./Errors')
const CommandRunner = require('./CommandRunner')
const getCompileName = function(project_id, user_id) {
const getCompileName = function (project_id, user_id) {
if (user_id != null) {
return `${project_id}-${user_id}`
} else {
@@ -49,19 +49,19 @@ const getCompileDir = (project_id, user_id) =>
module.exports = CompileManager = {
doCompileWithLock(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
callback = function (error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
const lockFile = Path.join(compileDir, '.project-lock')
// use a .project-lock file in the compile directory to prevent
// simultaneous compiles
return fse.ensureDir(compileDir, function(error) {
return fse.ensureDir(compileDir, function (error) {
if (error != null) {
return callback(error)
}
return LockManager.runWithLock(
lockFile,
releaseLock => CompileManager.doCompile(request, releaseLock),
(releaseLock) => CompileManager.doCompile(request, releaseLock),
callback
)
})
@@ -69,7 +69,7 @@ module.exports = CompileManager = {
doCompile(request, callback) {
if (callback == null) {
callback = function(error, outputFiles) {}
callback = function (error, outputFiles) {}
}
const compileDir = getCompileDir(request.project_id, request.user_id)
let timer = new Metrics.Timer('write-to-disk')
@@ -77,7 +77,7 @@ module.exports = CompileManager = {
{ project_id: request.project_id, user_id: request.user_id },
'syncing resources to disk'
)
return ResourceWriter.syncResourcesToDisk(request, compileDir, function(
return ResourceWriter.syncResourcesToDisk(request, compileDir, function (
error,
resourceList
) {
@@ -109,7 +109,7 @@ module.exports = CompileManager = {
)
timer.done()
const injectDraftModeIfRequired = function(callback) {
const injectDraftModeIfRequired = function (callback) {
if (request.draft) {
return DraftModeManager.injectDraftMode(
Path.join(compileDir, request.rootResourcePath),
@@ -120,12 +120,12 @@ module.exports = CompileManager = {
}
}
const createTikzFileIfRequired = callback =>
const createTikzFileIfRequired = (callback) =>
TikzManager.checkMainFile(
compileDir,
request.rootResourcePath,
resourceList,
function(error, needsMainFile) {
function (error, needsMainFile) {
if (error != null) {
return callback(error)
}
@@ -165,7 +165,7 @@ module.exports = CompileManager = {
// apply a series of file modifications/creations for draft mode and tikz
return async.series(
[injectDraftModeIfRequired, createTikzFileIfRequired],
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -177,9 +177,9 @@ module.exports = CompileManager = {
request.imageName != null
? request.imageName.match(/:(.*)/)
: undefined,
x1 => x1[1]
(x1) => x1[1]
),
x => x.replace(/\./g, '-')
(x) => x.replace(/\./g, '-')
) || 'default'
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
tag = 'other'
@@ -202,13 +202,11 @@ module.exports = CompileManager = {
environment: env,
compileGroup: request.compileGroup
},
function(error, output, stats, timings) {
function (error, output, stats, timings) {
// request was for validation only
let metric_key, metric_value
if (request.check === 'validate') {
const result = (error != null
? error.code
: undefined)
const result = (error != null ? error.code : undefined)
? 'fail'
: 'pass'
error = new Error('validation')
@@ -231,7 +229,7 @@ module.exports = CompileManager = {
OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(err, outputFiles) {
function (err, outputFiles) {
if (err != null) {
return callback(err)
}
@@ -289,7 +287,7 @@ module.exports = CompileManager = {
return OutputFileFinder.findOutputFiles(
resourceList,
compileDir,
function(error, outputFiles) {
function (error, outputFiles) {
if (error != null) {
return callback(error)
}
@@ -309,7 +307,7 @@ module.exports = CompileManager = {
stopCompile(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const compileName = getCompileName(project_id, user_id)
return LatexRunner.killLatex(compileName, callback)
@@ -317,16 +315,16 @@ module.exports = CompileManager = {
clearProject(project_id, user_id, _callback) {
if (_callback == null) {
_callback = function(error) {}
_callback = function (error) {}
}
const callback = function(error) {
const callback = function (error) {
_callback(error)
return (_callback = function() {})
return (_callback = function () {})
}
const compileDir = getCompileDir(project_id, user_id)
return CompileManager._checkDirectory(compileDir, function(err, exists) {
return CompileManager._checkDirectory(compileDir, function (err, exists) {
if (err != null) {
return callback(err)
}
@@ -339,9 +337,9 @@ module.exports = CompileManager = {
proc.on('error', callback)
let stderr = ''
proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))
proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))
return proc.on('close', function(code) {
return proc.on('close', function (code) {
if (code === 0) {
return callback(null)
} else {
@@ -353,26 +351,26 @@ module.exports = CompileManager = {
_findAllDirs(callback) {
if (callback == null) {
callback = function(error, allDirs) {}
callback = function (error, allDirs) {}
}
const root = Settings.path.compilesDir
return fs.readdir(root, function(err, files) {
return fs.readdir(root, function (err, files) {
if (err != null) {
return callback(err)
}
const allDirs = Array.from(files).map(file => Path.join(root, file))
const allDirs = Array.from(files).map((file) => Path.join(root, file))
return callback(null, allDirs)
})
},
clearExpiredProjects(max_cache_age_ms, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const now = Date.now()
// action for each directory
const expireIfNeeded = (checkDir, cb) =>
fs.stat(checkDir, function(err, stats) {
fs.stat(checkDir, function (err, stats) {
if (err != null) {
return cb()
} // ignore errors checking directory
@@ -385,7 +383,7 @@ module.exports = CompileManager = {
}
})
// iterate over all project directories
return CompileManager._findAllDirs(function(error, allDirs) {
return CompileManager._findAllDirs(function (error, allDirs) {
if (error != null) {
return callback()
}
@@ -395,9 +393,9 @@ module.exports = CompileManager = {
_checkDirectory(compileDir, callback) {
if (callback == null) {
callback = function(error, exists) {}
callback = function (error, exists) {}
}
return fs.lstat(compileDir, function(err, stats) {
return fs.lstat(compileDir, function (err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
return callback(null, false) // directory does not exist
} else if (err != null) {
@@ -423,7 +421,7 @@ module.exports = CompileManager = {
// might not match the file path on the host. The .synctex.gz file however, will be accessed
// wherever it is on the host.
if (callback == null) {
callback = function(error, pdfPositions) {}
callback = function (error, pdfPositions) {}
}
const compileName = getCompileName(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
@@ -431,7 +429,7 @@ module.exports = CompileManager = {
const compileDir = getCompileDir(project_id, user_id)
const synctex_path = `${base_dir}/output.pdf`
const command = ['code', synctex_path, file_path, line, column]
CompileManager._runSynctex(project_id, user_id, command, function(
CompileManager._runSynctex(project_id, user_id, command, function (
error,
stdout
) {
@@ -448,14 +446,14 @@ module.exports = CompileManager = {
syncFromPdf(project_id, user_id, page, h, v, callback) {
if (callback == null) {
callback = function(error, filePositions) {}
callback = function (error, filePositions) {}
}
const compileName = getCompileName(project_id, user_id)
const compileDir = getCompileDir(project_id, user_id)
const base_dir = Settings.path.synctexBaseDir(compileName)
const synctex_path = `${base_dir}/output.pdf`
const command = ['pdf', synctex_path, page, h, v]
CompileManager._runSynctex(project_id, user_id, command, function(
CompileManager._runSynctex(project_id, user_id, command, function (
error,
stdout
) {
@@ -475,17 +473,17 @@ module.exports = CompileManager = {
_checkFileExists(dir, filename, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const file = Path.join(dir, filename)
return fs.stat(dir, function(error, stats) {
return fs.stat(dir, function (error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output directory'))
}
if (error != null) {
return callback(error)
}
return fs.stat(file, function(error, stats) {
return fs.stat(file, function (error, stats) {
if ((error != null ? error.code : undefined) === 'ENOENT') {
return callback(new Errors.NotFoundError('no output file'))
}
@@ -502,7 +500,7 @@ module.exports = CompileManager = {
_runSynctex(project_id, user_id, command, callback) {
if (callback == null) {
callback = function(error, stdout) {}
callback = function (error, stdout) {}
}
const seconds = 1000
@@ -512,7 +510,7 @@ module.exports = CompileManager = {
const timeout = 60 * 1000 // increased to allow for large projects
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'synctex'
CompileManager._checkFileExists(directory, 'output.synctex.gz', error => {
CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
if (error) {
return callback(error)
}
@@ -526,7 +524,7 @@ module.exports = CompileManager = {
timeout,
{},
compileGroup,
function(error, output) {
function (error, output) {
if (error != null) {
logger.err(
{ err: error, command, project_id, user_id },
@@ -576,7 +574,7 @@ module.exports = CompileManager = {
wordcount(project_id, user_id, file_name, image, callback) {
if (callback == null) {
callback = function(error, pdfPositions) {}
callback = function (error, pdfPositions) {}
}
logger.log({ project_id, user_id, file_name, image }, 'running wordcount')
const file_path = `$COMPILE_DIR/${file_name}`
@@ -591,7 +589,7 @@ module.exports = CompileManager = {
const timeout = 60 * 1000
const compileName = getCompileName(project_id, user_id)
const compileGroup = 'wordcount'
return fse.ensureDir(compileDir, function(error) {
return fse.ensureDir(compileDir, function (error) {
if (error != null) {
logger.err(
{ error, project_id, user_id, file_name },
@@ -607,14 +605,14 @@ module.exports = CompileManager = {
timeout,
{},
compileGroup,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
return fs.readFile(
compileDir + '/' + file_name + '.wc',
'utf-8',
function(err, stdout) {
function (err, stdout) {
if (err != null) {
// call it node_err so sentry doesn't use random path error as unique id so it can't be ignored
logger.err(

View File

@@ -23,7 +23,7 @@ module.exports = LockManager = {
tryLock(key, callback) {
let lockValue
if (callback == null) {
callback = function(err, gotLock) {}
callback = function (err, gotLock) {}
}
const existingLock = LockState[key]
if (existingLock != null) {
@@ -46,11 +46,11 @@ module.exports = LockManager = {
getLock(key, callback) {
let attempt
if (callback == null) {
callback = function(error, lockValue) {}
callback = function (error, lockValue) {}
}
const startTime = Date.now()
return (attempt = () =>
LockManager.tryLock(key, function(error, gotLock, lockValue) {
LockManager.tryLock(key, function (error, gotLock, lockValue) {
if (error != null) {
return callback(error)
}
@@ -68,7 +68,7 @@ module.exports = LockManager = {
releaseLock(key, lockValue, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const existingLock = LockState[key]
if (existingLock === lockValue) {
@@ -93,14 +93,14 @@ module.exports = LockManager = {
runWithLock(key, runner, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return LockManager.getLock(key, function(error, lockValue) {
return LockManager.getLock(key, function (error, lockValue) {
if (error != null) {
return callback(error)
}
return runner((error1, ...args) =>
LockManager.releaseLock(key, lockValue, function(error2) {
LockManager.releaseLock(key, lockValue, function (error2) {
error = error1 || error2
if (error != null) {
return callback(error)
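
This is the in-memory lock manager keyed by name (the Docker runner further down locks on container names with it). The visible part of runWithLock shows the contract: the runner receives a release callback, and the caller's callback fires only after the lock has been released. A minimal usage sketch; the key, the work function, and the forwarding of results past the cut-off point are assumptions:

const LockManager = require('./LockManager') // the module shown above; the require path is assumed

// hypothetical unit of work that must not run concurrently for the same key
const buildOnce = (done) => setTimeout(() => done(null, 'output.pdf'), 10)

LockManager.runWithLock(
  'container-abc123',
  (releaseLock) => buildOnce(releaseLock),
  (error, result) => {
    // runs after buildOnce finished and the lock was released
    // (the full module forwards the runner's extra arguments here; that tail is cut off above)
    if (error != null) return console.error('build failed or lock error:', error)
    console.log('built:', result)
  }
)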

View File

@@ -32,7 +32,7 @@ logger.info('using docker runner')
const usingSiblingContainers = () =>
__guard__(
Settings != null ? Settings.path : undefined,
x => x.sandboxedCompilesHostDir
(x) => x.sandboxedCompilesHostDir
) != null
let containerMonitorTimeout
@@ -56,7 +56,7 @@ module.exports = DockerRunner = {
) {
let name
if (callback == null) {
callback = function(error, output) {}
callback = function (error, output) {}
}
if (usingSiblingContainers()) {
const _newPath = Settings.path.sandboxedCompilesHostDir
@@ -77,8 +77,8 @@ module.exports = DockerRunner = {
const volumes = {}
volumes[directory] = '/compile'
command = Array.from(command).map(arg =>
__guardMethod__(arg.toString(), 'replace', o =>
command = Array.from(command).map((arg) =>
__guardMethod__(arg.toString(), 'replace', (o) =>
o.replace('$COMPILE_DIR', '/compile')
)
)
@@ -112,7 +112,7 @@ module.exports = DockerRunner = {
// logOptions = _.clone(options)
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
logger.log({ project_id }, 'running docker container')
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function (
error,
output
) {
@@ -121,7 +121,9 @@ module.exports = DockerRunner = {
{ err: error, project_id },
'error running container so destroying and retrying'
)
return DockerRunner.destroyContainer(name, null, true, function(error) {
return DockerRunner.destroyContainer(name, null, true, function (
error
) {
if (error != null) {
return callback(error)
}
@@ -142,15 +144,17 @@ module.exports = DockerRunner = {
kill(container_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
logger.log({ container_id }, 'sending kill signal to container')
const container = dockerode.getContainer(container_id)
return container.kill(function(error) {
return container.kill(function (error) {
if (
error != null &&
__guardMethod__(error != null ? error.message : undefined, 'match', o =>
o.match(/Cannot kill container .* is not running/)
__guardMethod__(
error != null ? error.message : undefined,
'match',
(o) => o.match(/Cannot kill container .* is not running/)
)
) {
logger.warn(
@@ -170,12 +174,12 @@ module.exports = DockerRunner = {
_runAndWaitForContainer(options, volumes, timeout, _callback) {
if (_callback == null) {
_callback = function(error, output) {}
_callback = function (error, output) {}
}
const callback = function(...args) {
const callback = function (...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
return (_callback = function () {})
}
const { name } = options
@@ -184,13 +188,13 @@ module.exports = DockerRunner = {
let containerReturned = false
let output = {}
const callbackIfFinished = function() {
const callbackIfFinished = function () {
if (streamEnded && containerReturned) {
return callback(null, output)
}
}
const attachStreamHandler = function(error, _output) {
const attachStreamHandler = function (error, _output) {
if (error != null) {
return callback(error)
}
@@ -203,12 +207,12 @@ module.exports = DockerRunner = {
options,
volumes,
attachStreamHandler,
function(error, containerId) {
function (error, containerId) {
if (error != null) {
return callback(error)
}
return DockerRunner.waitForContainer(name, timeout, function(
return DockerRunner.waitForContainer(name, timeout, function (
error,
exitCode
) {
@@ -231,7 +235,7 @@ module.exports = DockerRunner = {
containerReturned = true
__guard__(
options != null ? options.HostConfig : undefined,
x => (x.SecurityOpt = null)
(x) => (x.SecurityOpt = null)
) // small log line
logger.log({ err, exitCode, options }, 'docker container has exited')
return callbackIfFinished()
@@ -357,21 +361,18 @@ module.exports = DockerRunner = {
_fingerprintContainer(containerOptions) {
// Yay, Hashing!
const json = JSON.stringify(containerOptions)
return crypto
.createHash('md5')
.update(json)
.digest('hex')
return crypto.createHash('md5').update(json).digest('hex')
},
startContainer(options, volumes, attachStreamHandler, callback) {
return LockManager.runWithLock(
options.name,
releaseLock =>
(releaseLock) =>
// Check that volumes exist before starting the container.
// When a container is started with volume pointing to a
// non-existent directory then docker creates the directory but
// with root ownership.
DockerRunner._checkVolumes(options, volumes, function(err) {
DockerRunner._checkVolumes(options, volumes, function (err) {
if (err != null) {
return releaseLock(err)
}
@@ -390,7 +391,7 @@ module.exports = DockerRunner = {
// Check that volumes exist and are directories
_checkVolumes(options, volumes, callback) {
if (callback == null) {
callback = function(error, containerName) {}
callback = function (error, containerName) {}
}
if (usingSiblingContainers()) {
// Server Pro, with sibling-containers active, skip checks
@@ -398,7 +399,7 @@ module.exports = DockerRunner = {
}
const checkVolume = (path, cb) =>
fs.stat(path, function(err, stats) {
fs.stat(path, function (err, stats) {
if (err != null) {
return cb(err)
}
@@ -409,14 +410,14 @@ module.exports = DockerRunner = {
})
const jobs = []
for (const vol in volumes) {
;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
;((vol) => jobs.push((cb) => checkVolume(vol, cb)))(vol)
}
return async.series(jobs, callback)
},
_startContainer(options, volumes, attachStreamHandler, callback) {
if (callback == null) {
callback = function(error, output) {}
callback = function (error, output) {}
}
callback = _.once(callback)
const { name } = options
@@ -425,7 +426,7 @@ module.exports = DockerRunner = {
const container = dockerode.getContainer(name)
const createAndStartContainer = () =>
dockerode.createContainer(options, function(error, container) {
dockerode.createContainer(options, function (error, container) {
if (error != null) {
return callback(error)
}
@@ -435,11 +436,11 @@ module.exports = DockerRunner = {
DockerRunner.attachToContainer(
options.name,
attachStreamHandler,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
return container.start(function(error) {
return container.start(function (error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) !== 304
@@ -452,7 +453,7 @@ module.exports = DockerRunner = {
})
}
)
return container.inspect(function(error, stats) {
return container.inspect(function (error, stats) {
if ((error != null ? error.statusCode : undefined) === 404) {
return createAndStartContainer()
} else if (error != null) {
@@ -469,7 +470,7 @@ module.exports = DockerRunner = {
attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
const container = dockerode.getContainer(containerId)
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function (
error,
stream
) {
@@ -486,7 +487,7 @@ module.exports = DockerRunner = {
logger.log({ container_id: containerId }, 'attached to container')
const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
const createStringOutputStream = function(name) {
const createStringOutputStream = function (name) {
return {
data: '',
overflowed: false,
@@ -519,7 +520,7 @@ module.exports = DockerRunner = {
container.modem.demuxStream(stream, stdout, stderr)
stream.on('error', err =>
stream.on('error', (err) =>
logger.error(
{ err, container_id: containerId },
'error reading from container stream'
@@ -534,28 +535,28 @@ module.exports = DockerRunner = {
waitForContainer(containerId, timeout, _callback) {
if (_callback == null) {
_callback = function(error, exitCode) {}
_callback = function (error, exitCode) {}
}
const callback = function(...args) {
const callback = function (...args) {
_callback(...Array.from(args || []))
// Only call the callback once
return (_callback = function() {})
return (_callback = function () {})
}
const container = dockerode.getContainer(containerId)
let timedOut = false
const timeoutId = setTimeout(function() {
const timeoutId = setTimeout(function () {
timedOut = true
logger.log(
{ container_id: containerId },
'timeout reached, killing container'
)
return container.kill(function() {})
return container.kill(function () {})
}, timeout)
logger.log({ container_id: containerId }, 'waiting for docker container')
return container.wait(function(error, res) {
return container.wait(function (error, res) {
if (error != null) {
clearTimeout(timeoutId)
logger.error(
@@ -588,11 +589,11 @@ module.exports = DockerRunner = {
// error callback. We fall back to deleting by name if no id is
// supplied.
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return LockManager.runWithLock(
containerName,
releaseLock =>
(releaseLock) =>
DockerRunner._destroyContainer(
containerId || containerName,
shouldForce,
@@ -604,11 +605,11 @@ module.exports = DockerRunner = {
_destroyContainer(containerId, shouldForce, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
logger.log({ container_id: containerId }, 'destroying docker container')
const container = dockerode.getContainer(containerId)
return container.remove({ force: shouldForce === true }, function(error) {
return container.remove({ force: shouldForce === true }, function (error) {
if (
error != null &&
(error != null ? error.statusCode : undefined) === 404
@@ -638,7 +639,7 @@ module.exports = DockerRunner = {
examineOldContainer(container, callback) {
if (callback == null) {
callback = function(error, name, id, ttl) {}
callback = function (error, name, id, ttl) {}
}
const name =
container.Name ||
@@ -657,16 +658,19 @@ module.exports = DockerRunner = {
destroyOldContainers(callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return dockerode.listContainers({ all: true }, function(error, containers) {
return dockerode.listContainers({ all: true }, function (
error,
containers
) {
if (error != null) {
return callback(error)
}
const jobs = []
for (const container of Array.from(containers || [])) {
;(container =>
DockerRunner.examineOldContainer(container, function(
;((container) =>
DockerRunner.examineOldContainer(container, function (
err,
name,
id,
@@ -676,7 +680,7 @@ module.exports = DockerRunner = {
// strip the / prefix
// the LockManager uses the plain container name
name = name.slice(1)
return jobs.push(cb =>
return jobs.push((cb) =>
DockerRunner.destroyContainer(name, id, false, () => cb())
)
}
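
The __guard__ and __guardMethod__ calls being reformatted throughout this file are the null-safety helpers that bulk-decaffeinate generates; their definitions sit at the bottom of each converted file and are outside the hunks shown here, so this is a sketch of the usual generated form rather than a copy from the diff:

// typical decaffeinate helpers, roughly equivalent to optional chaining (value?.foo, obj?.method())
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null ? transform(value) : undefined
}

function __guardMethod__(obj, methodName, transform) {
  if (typeof obj !== 'undefined' && obj !== null && typeof obj[methodName] === 'function') {
    return transform(obj, methodName)
  }
  return undefined
}

// e.g. the sandboxed-compiles check above reduces to: Settings?.path?.sandboxedCompilesHostDir != null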

View File

@@ -18,9 +18,9 @@ const logger = require('logger-sharelatex')
module.exports = DraftModeManager = {
injectDraftMode(filename, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return fs.readFile(filename, 'utf8', function(error, content) {
return fs.readFile(filename, 'utf8', function (error, content) {
if (error != null) {
return callback(error)
}
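
The part of injectDraftMode after the file read is not shown in this hunk; in spirit it rewrites the \documentclass line so the project compiles with the draft option. A hypothetical sketch of that replacement (the exact rules are an assumption, not a copy of the hidden lines):

// hypothetical core of injectDraftMode
function addDraftOption(content) {
  return content
    .replace(/\\documentclass\[/, '\\documentclass[draft,') // existing option list
    .replace(/\\documentclass\{/, '\\documentclass[draft]{') // no option list yet
}

console.log(addDraftOption('\\documentclass[12pt]{article}')) // \documentclass[draft,12pt]{article}
console.log(addDraftOption('\\documentclass{article}')) // \documentclass[draft]{article}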

View File

@@ -5,7 +5,7 @@
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
let Errors
var NotFoundError = function(message) {
var NotFoundError = function (message) {
const error = new Error(message)
error.name = 'NotFoundError'
error.__proto__ = NotFoundError.prototype
@@ -13,7 +13,7 @@ var NotFoundError = function(message) {
}
NotFoundError.prototype.__proto__ = Error.prototype
var FilesOutOfSyncError = function(message) {
var FilesOutOfSyncError = function (message) {
const error = new Error(message)
error.name = 'FilesOutOfSyncError'
error.__proto__ = FilesOutOfSyncError.prototype
@@ -21,7 +21,7 @@ var FilesOutOfSyncError = function(message) {
}
FilesOutOfSyncError.prototype.__proto__ = Error.prototype
var AlreadyCompilingError = function(message) {
var AlreadyCompilingError = function (message) {
const error = new Error(message)
error.name = 'AlreadyCompilingError'
error.__proto__ = AlreadyCompilingError.prototype
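
These constructors are the pre-class subclassing pattern bulk-decaffeinate left in place: the __proto__ assignments are what keep instanceof working for the custom error types thrown elsewhere in this diff (Errors.NotFoundError, Errors.FilesOutOfSyncError, Errors.AlreadyCompilingError). A small usage sketch, assuming the module exports the three constructors as those usages imply:

const Errors = require('./Errors') // the module shown above

const error = new Errors.NotFoundError('no output file')

// both checks hold because of the prototype wiring above
console.log(error instanceof Errors.NotFoundError) // true
console.log(error instanceof Error) // true
console.log(error.name) // 'NotFoundError'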

View File

@@ -27,7 +27,7 @@ module.exports = LatexRunner = {
runLatex(project_id, options, callback) {
let command
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
let {
directory,
@@ -89,20 +89,20 @@ module.exports = LatexRunner = {
timeout,
environment,
compileGroup,
function(error, output) {
function (error, output) {
delete ProcessTable[id]
if (error != null) {
return callback(error)
}
const runs =
__guard__(
__guard__(output != null ? output.stderr : undefined, x1 =>
__guard__(output != null ? output.stderr : undefined, (x1) =>
x1.match(/^Run number \d+ of .*latex/gm)
),
x => x.length
(x) => x.length
) || 0
const failed =
__guard__(output != null ? output.stdout : undefined, x2 =>
__guard__(output != null ? output.stdout : undefined, (x2) =>
x2.match(/^Latexmk: Errors/m)
) != null
? 1
@@ -122,21 +122,21 @@ module.exports = LatexRunner = {
stderr != null
? stderr.match(/Percent of CPU this job got: (\d+)/m)
: undefined,
x3 => x3[1]
(x3) => x3[1]
) || 0
timings['cpu-time'] =
__guard__(
stderr != null
? stderr.match(/User time.*: (\d+.\d+)/m)
: undefined,
x4 => x4[1]
(x4) => x4[1]
) || 0
timings['sys-time'] =
__guard__(
stderr != null
? stderr.match(/System time.*: (\d+.\d+)/m)
: undefined,
x5 => x5[1]
(x5) => x5[1]
) || 0
// record output files
LatexRunner.writeLogOutput(project_id, directory, output, () => {
@@ -153,7 +153,7 @@ module.exports = LatexRunner = {
// internal method for writing non-empty log files
function _writeFile(file, content, cb) {
if (content && content.length > 0) {
fs.writeFile(file, content, err => {
fs.writeFile(file, content, (err) => {
if (err) {
logger.error({ project_id, file }, 'error writing log file') // don't fail on error
}
@@ -173,7 +173,7 @@ module.exports = LatexRunner = {
killLatex(project_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const id = `${project_id}`
logger.log({ id }, 'killing running compile')
@@ -202,7 +202,7 @@ module.exports = LatexRunner = {
return (
__guard__(
Settings != null ? Settings.clsi : undefined,
x => x.latexmkCommandPrefix
(x) => x.latexmkCommandPrefix
) || []
).concat(args)
},
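
The runLatex block above pulls compile statistics out of the latexmk / GNU time output with chained null guards. A plain-JavaScript sketch of the same extraction on an invented sample transcript (the sample lines are illustrative, not taken from a real run):

// invented sample of what latexmk and /usr/bin/time write to the streams
const stdout = 'Latexmk: All targets (output.pdf) are up-to-date\n'
const stderr = [
  "Run number 1 of rule 'pdflatex'",
  "Run number 2 of rule 'pdflatex'",
  'Percent of CPU this job got: 95',
  'User time (seconds): 1.23',
  'System time (seconds): 0.45'
].join('\n')

const runs = (stderr.match(/^Run number \d+ of .*latex/gm) || []).length
const failed = /^Latexmk: Errors/m.test(stdout) ? 1 : 0
const cpuPercent = (stderr.match(/Percent of CPU this job got: (\d+)/m) || [])[1] || 0
const userTime = (stderr.match(/User time.*: (\d+.\d+)/m) || [])[1] || 0

console.log({ runs, failed, cpuPercent, userTime }) // { runs: 2, failed: 0, cpuPercent: '95', userTime: '1.23' }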

View File

@@ -33,11 +33,11 @@ module.exports = CommandRunner = {
) {
let key, value
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
} else {
callback = _.once(callback)
}
command = Array.from(command).map(arg =>
command = Array.from(command).map((arg) =>
arg.toString().replace('$COMPILE_DIR', directory)
)
logger.log({ project_id, command, directory }, 'running command')
@@ -58,9 +58,9 @@ module.exports = CommandRunner = {
const proc = spawn(command[0], command.slice(1), { cwd: directory, env })
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', data => (stdout += data))
proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))
proc.on('error', function(err) {
proc.on('error', function (err) {
logger.err(
{ err, project_id, command, directory },
'error running command'
@@ -68,7 +68,7 @@ module.exports = CommandRunner = {
return callback(err)
})
proc.on('close', function(code, signal) {
proc.on('close', function (code, signal) {
let err
logger.info({ code, signal, project_id }, 'command exited')
if (signal === 'SIGTERM') {
@@ -91,7 +91,7 @@ module.exports = CommandRunner = {
kill(pid, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
try {
process.kill(-pid) // kill all processes in group

View File

@@ -25,20 +25,20 @@ module.exports = LockManager = {
runWithLock(path, runner, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const lockOpts = {
wait: this.MAX_LOCK_WAIT_TIME,
pollPeriod: this.LOCK_TEST_INTERVAL,
stale: this.LOCK_STALE
}
return Lockfile.lock(path, lockOpts, function(error) {
return Lockfile.lock(path, lockOpts, function (error) {
if ((error != null ? error.code : undefined) === 'EEXIST') {
return callback(new Errors.AlreadyCompilingError('compile in progress'))
} else if (error != null) {
return fs.lstat(path, (statLockErr, statLock) =>
fs.lstat(Path.dirname(path), (statDirErr, statDir) =>
fs.readdir(Path.dirname(path), function(readdirErr, readdirDir) {
fs.readdir(Path.dirname(path), function (readdirErr, readdirDir) {
logger.err(
{
error,
@@ -58,7 +58,7 @@ module.exports = LockManager = {
)
} else {
return runner((error1, ...args) =>
Lockfile.unlock(path, function(error2) {
Lockfile.unlock(path, function (error2) {
error = error1 || error2
if (error != null) {
return callback(error)

View File

@@ -47,9 +47,9 @@ module.exports = OutputCacheManager = {
generateBuildId(callback) {
// generate a secure build id from Date.now() and 8 random bytes in hex
if (callback == null) {
callback = function(error, buildId) {}
callback = function (error, buildId) {}
}
return crypto.randomBytes(8, function(err, buf) {
return crypto.randomBytes(8, function (err, buf) {
if (err != null) {
return callback(err)
}
@@ -61,9 +61,9 @@ module.exports = OutputCacheManager = {
saveOutputFiles(outputFiles, compileDir, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return OutputCacheManager.generateBuildId(function(err, buildId) {
return OutputCacheManager.generateBuildId(function (err, buildId) {
if (err != null) {
return callback(err)
}
@@ -80,7 +80,7 @@ module.exports = OutputCacheManager = {
// make a compileDir/CACHE_SUBDIR/build_id directory and
// copy all the output files into it
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const cacheRoot = Path.join(compileDir, OutputCacheManager.CACHE_SUBDIR)
// Put the files into a new cache subdirectory
@@ -99,17 +99,20 @@ module.exports = OutputCacheManager = {
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
) {
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
err
) {
if (err != null) {
return logger.warn({ err }, 'erroring archiving log files')
OutputCacheManager.archiveLogs(
outputFiles,
compileDir,
buildId,
function (err) {
if (err != null) {
return logger.warn({ err }, 'erroring archiving log files')
}
}
})
)
}
// make the new cache directory
return fse.ensureDir(cacheDir, function(err) {
return fse.ensureDir(cacheDir, function (err) {
if (err != null) {
logger.error(
{ err, directory: cacheDir },
@@ -121,7 +124,7 @@ module.exports = OutputCacheManager = {
const results = []
return async.mapSeries(
outputFiles,
function(file, cb) {
function (file, cb) {
// don't send dot files as output, express doesn't serve them
if (OutputCacheManager._fileIsHidden(file.path)) {
logger.debug(
@@ -136,7 +139,7 @@ module.exports = OutputCacheManager = {
Path.join(compileDir, file.path),
Path.join(cacheDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
return OutputCacheManager._checkFileIsSafe(src, function (
err,
isSafe
) {
@@ -146,7 +149,7 @@ module.exports = OutputCacheManager = {
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldCopy(src, function(
return OutputCacheManager._checkIfShouldCopy(src, function (
err,
shouldCopy
) {
@@ -156,7 +159,7 @@ module.exports = OutputCacheManager = {
if (!shouldCopy) {
return cb()
}
return OutputCacheManager._copyFile(src, dst, function(err) {
return OutputCacheManager._copyFile(src, dst, function (err) {
if (err != null) {
return cb(err)
}
@@ -167,12 +170,12 @@ module.exports = OutputCacheManager = {
})
})
},
function(err) {
function (err) {
if (err != null) {
// pass back the original files if we encountered *any* error
callback(err, outputFiles)
// clean up the directory we just created
return fse.remove(cacheDir, function(err) {
return fse.remove(cacheDir, function (err) {
if (err != null) {
return logger.error(
{ err, dir: cacheDir },
@@ -197,7 +200,7 @@ module.exports = OutputCacheManager = {
archiveLogs(outputFiles, compileDir, buildId, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const archiveDir = Path.join(
compileDir,
@@ -205,18 +208,18 @@ module.exports = OutputCacheManager = {
buildId
)
logger.log({ dir: archiveDir }, 'archiving log files for project')
return fse.ensureDir(archiveDir, function(err) {
return fse.ensureDir(archiveDir, function (err) {
if (err != null) {
return callback(err)
}
return async.mapSeries(
outputFiles,
function(file, cb) {
function (file, cb) {
const [src, dst] = Array.from([
Path.join(compileDir, file.path),
Path.join(archiveDir, file.path)
])
return OutputCacheManager._checkFileIsSafe(src, function(
return OutputCacheManager._checkFileIsSafe(src, function (
err,
isSafe
) {
@@ -226,7 +229,7 @@ module.exports = OutputCacheManager = {
if (!isSafe) {
return cb()
}
return OutputCacheManager._checkIfShouldArchive(src, function(
return OutputCacheManager._checkIfShouldArchive(src, function (
err,
shouldArchive
) {
@@ -248,9 +251,9 @@ module.exports = OutputCacheManager = {
expireOutputFiles(cacheRoot, options, callback) {
// look in compileDir for build dirs and delete if > N or age of mod time > T
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return fs.readdir(cacheRoot, function(err, results) {
return fs.readdir(cacheRoot, function (err, results) {
if (err != null) {
if (err.code === 'ENOENT') {
return callback(null)
@@ -262,7 +265,7 @@ module.exports = OutputCacheManager = {
const dirs = results.sort().reverse()
const currentTime = Date.now()
const isExpired = function(dir, index) {
const isExpired = function (dir, index) {
if ((options != null ? options.keep : undefined) === dir) {
return false
}
@@ -280,7 +283,7 @@ module.exports = OutputCacheManager = {
// we can get the build time from the first part of the directory name DDDD-RRRR
// DDDD is date and RRRR is random bytes
const dirTime = parseInt(
__guard__(dir.split('-'), x => x[0]),
__guard__(dir.split('-'), (x) => x[0]),
16
)
const age = currentTime - dirTime
@@ -290,7 +293,7 @@ module.exports = OutputCacheManager = {
const toRemove = _.filter(dirs, isExpired)
const removeDir = (dir, cb) =>
fse.remove(Path.join(cacheRoot, dir), function(err, result) {
fse.remove(Path.join(cacheRoot, dir), function (err, result) {
logger.log({ cache: cacheRoot, dir }, 'removed expired cache dir')
if (err != null) {
logger.error({ err, dir }, 'cache remove error')
@@ -312,9 +315,9 @@ module.exports = OutputCacheManager = {
_checkFileIsSafe(src, callback) {
// check if we have a valid file to copy into the cache
if (callback == null) {
callback = function(error, isSafe) {}
callback = function (error, isSafe) {}
}
return fs.stat(src, function(err, stats) {
return fs.stat(src, function (err, stats) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
@@ -341,7 +344,7 @@ module.exports = OutputCacheManager = {
_copyFile(src, dst, callback) {
// copy output file into the cache
return fse.copy(src, dst, function(err) {
return fse.copy(src, dst, function (err) {
if ((err != null ? err.code : undefined) === 'ENOENT') {
logger.warn(
{ err, file: src },
@@ -368,7 +371,7 @@ module.exports = OutputCacheManager = {
_checkIfShouldCopy(src, callback) {
if (callback == null) {
callback = function(err, shouldCopy) {}
callback = function (err, shouldCopy) {}
}
return callback(null, !Path.basename(src).match(/^strace/))
},
@@ -376,7 +379,7 @@ module.exports = OutputCacheManager = {
_checkIfShouldArchive(src, callback) {
let needle
if (callback == null) {
callback = function(err, shouldCopy) {}
callback = function (err, shouldCopy) {}
}
if (Path.basename(src).match(/^strace/)) {
return callback(null, true)
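
The build-id format ties generateBuildId and expireOutputFiles together: per the comments above, a directory is named DDDD-RRRR (hex timestamp, then 8 random bytes in hex), and the expiry check parses the first segment back into a time. The construction itself falls outside the hunk, so this is a sketch consistent with those comments and the parser shown above:

const crypto = require('crypto')

// build ids look like DDDD-RRRR: hex Date.now(), a dash, 8 random bytes in hex
function generateBuildId(callback) {
  crypto.randomBytes(8, (err, buf) => {
    if (err != null) {
      return callback(err)
    }
    callback(null, `${Date.now().toString(16)}-${buf.toString('hex')}`)
  })
}

// the expiry check recovers the age from the first segment of the directory name
function buildDirAge(dir) {
  const dirTime = parseInt(dir.split('-')[0], 16)
  return Date.now() - dirTime
}

generateBuildId((err, buildId) => {
  if (err) throw err
  console.log(buildId, buildDirAge(buildId), 'ms old')
})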

View File

@@ -24,14 +24,14 @@ const logger = require('logger-sharelatex')
module.exports = OutputFileFinder = {
findOutputFiles(resources, directory, callback) {
if (callback == null) {
callback = function(error, outputFiles, allFiles) {}
callback = function (error, outputFiles, allFiles) {}
}
const incomingResources = {}
for (const resource of Array.from(resources)) {
incomingResources[resource.path] = true
}
return OutputFileFinder._getAllFiles(directory, function(error, allFiles) {
return OutputFileFinder._getAllFiles(directory, function (error, allFiles) {
if (allFiles == null) {
allFiles = []
}
@@ -44,7 +44,7 @@ module.exports = OutputFileFinder = {
if (!incomingResources[file]) {
outputFiles.push({
path: file,
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
type: __guard__(file.match(/\.([^\.]+)$/), (x) => x[1])
})
}
}
@@ -54,11 +54,11 @@ module.exports = OutputFileFinder = {
_getAllFiles(directory, _callback) {
if (_callback == null) {
_callback = function(error, fileList) {}
_callback = function (error, fileList) {}
}
const callback = function(error, fileList) {
const callback = function (error, fileList) {
_callback(error, fileList)
return (_callback = function() {})
return (_callback = function () {})
}
// don't include clsi-specific files/directories in the output list
@@ -87,9 +87,9 @@ module.exports = OutputFileFinder = {
const proc = spawn('find', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
proc.on('error', callback)
return proc.on('close', function(code) {
return proc.on('close', function (code) {
if (code !== 0) {
logger.warn(
{ directory, code },
@@ -98,7 +98,7 @@ module.exports = OutputFileFinder = {
return callback(null, [])
}
let fileList = stdout.trim().split('\n')
fileList = fileList.map(function(file) {
fileList = fileList.map(function (file) {
// Strip leading directory
let path
return (path = Path.relative(directory, file))

View File

@@ -26,10 +26,10 @@ module.exports = OutputFileOptimiser = {
// check output file (src) and see if we can optimise it, storing
// the result in the build directory (dst)
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
if (src.match(/\/output\.pdf$/)) {
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function(
return OutputFileOptimiser.checkIfPDFIsOptimised(src, function (
err,
isOptimised
) {
@@ -46,12 +46,12 @@ module.exports = OutputFileOptimiser = {
checkIfPDFIsOptimised(file, callback) {
const SIZE = 16 * 1024 // check the header of the pdf
const result = Buffer.alloc(SIZE) // fills with zeroes by default
return fs.open(file, 'r', function(err, fd) {
return fs.open(file, 'r', function (err, fd) {
if (err != null) {
return callback(err)
}
return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
fs.close(fd, function(errClose) {
fs.close(fd, function (errClose) {
if (errRead != null) {
return callback(errRead)
}
@@ -68,7 +68,7 @@ module.exports = OutputFileOptimiser = {
optimisePDF(src, dst, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const tmpOutput = dst + '.opt'
const args = ['--linearize', src, tmpOutput]
@@ -77,19 +77,19 @@ module.exports = OutputFileOptimiser = {
const timer = new Metrics.Timer('qpdf')
const proc = spawn('qpdf', args)
let stdout = ''
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
callback = _.once(callback) // avoid double call back for error and close event
proc.on('error', function(err) {
proc.on('error', function (err) {
logger.warn({ err, args }, 'qpdf failed')
return callback(null)
}) // ignore the error
return proc.on('close', function(code) {
return proc.on('close', function (code) {
timer.done()
if (code !== 0) {
logger.warn({ code, args }, 'qpdf returned error')
return callback(null) // ignore the error
}
return fs.rename(tmpOutput, dst, function(err) {
return fs.rename(tmpOutput, dst, function (err) {
if (err != null) {
logger.warn(
{ tmpOutput, dst },

View File

@@ -27,9 +27,9 @@ module.exports = ProjectPersistenceManager = {
refreshExpiryTimeout(callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
diskusage.check('/', function(err, stats) {
diskusage.check('/', function (err, stats) {
if (err) {
logger.err({ err: err }, 'error getting disk usage')
return callback(err)
@@ -48,9 +48,9 @@ module.exports = ProjectPersistenceManager = {
},
markProjectAsJustAccessed(project_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const job = cb =>
const job = (cb) =>
db.Project.findOrCreate({ where: { project_id } })
.spread((project, created) =>
project
@@ -64,9 +64,9 @@ module.exports = ProjectPersistenceManager = {
clearExpiredProjects(callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return ProjectPersistenceManager._findExpiredProjectIds(function(
return ProjectPersistenceManager._findExpiredProjectIds(function (
error,
project_ids
) {
@@ -74,9 +74,9 @@ module.exports = ProjectPersistenceManager = {
return callback(error)
}
logger.log({ project_ids }, 'clearing expired projects')
const jobs = Array.from(project_ids || []).map(project_id =>
(project_id => callback =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
const jobs = Array.from(project_ids || []).map((project_id) =>
((project_id) => (callback) =>
ProjectPersistenceManager.clearProjectFromCache(project_id, function (
err
) {
if (err != null) {
@@ -85,13 +85,13 @@ module.exports = ProjectPersistenceManager = {
return callback()
}))(project_id)
)
return async.series(jobs, function(error) {
return async.series(jobs, function (error) {
if (error != null) {
return callback(error)
}
return CompileManager.clearExpiredProjects(
ProjectPersistenceManager.EXPIRY_TIMEOUT,
error => callback()
(error) => callback()
)
})
})
@@ -99,16 +99,16 @@ module.exports = ProjectPersistenceManager = {
clearProject(project_id, user_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
logger.log({ project_id, user_id }, 'clearing project for user')
return CompileManager.clearProject(project_id, user_id, function(error) {
return CompileManager.clearProject(project_id, user_id, function (error) {
if (error != null) {
return callback(error)
}
return ProjectPersistenceManager.clearProjectFromCache(
project_id,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -120,17 +120,17 @@ module.exports = ProjectPersistenceManager = {
clearProjectFromCache(project_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
logger.log({ project_id }, 'clearing project from cache')
return UrlCache.clearProject(project_id, function(error) {
return UrlCache.clearProject(project_id, function (error) {
if (error != null) {
logger.err({ error, project_id }, 'error clearing project from cache')
return callback(error)
}
return ProjectPersistenceManager._clearProjectFromDatabase(
project_id,
function(error) {
function (error) {
if (error != null) {
logger.err(
{ error, project_id },
@@ -145,10 +145,10 @@ module.exports = ProjectPersistenceManager = {
_clearProjectFromDatabase(project_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
logger.log({ project_id }, 'clearing project from database')
const job = cb =>
const job = (cb) =>
db.Project.destroy({ where: { project_id } })
.then(() => cb())
.error(cb)
@@ -157,19 +157,19 @@ module.exports = ProjectPersistenceManager = {
_findExpiredProjectIds(callback) {
if (callback == null) {
callback = function(error, project_ids) {}
callback = function (error, project_ids) {}
}
const job = function(cb) {
const job = function (cb) {
const keepProjectsFrom = new Date(
Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT
)
const q = {}
q[db.op.lt] = keepProjectsFrom
return db.Project.findAll({ where: { lastAccessed: q } })
.then(projects =>
.then((projects) =>
cb(
null,
projects.map(project => project.project_id)
projects.map((project) => project.project_id)
)
)
.error(cb)

View File

@@ -27,7 +27,7 @@ module.exports = RequestParser = {
parse(body, callback) {
let resource
if (callback == null) {
callback = function(error, data) {}
callback = function (error, data) {}
}
const response = {}

View File

@@ -41,13 +41,13 @@ module.exports = ResourceStateManager = {
saveProjectState(state, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
if (state == null) {
// remove the file if no state passed in
logger.log({ state, basePath }, 'clearing sync state')
return fs.unlink(stateFile, function(err) {
return fs.unlink(stateFile, function (err) {
if (err != null && err.code !== 'ENOENT') {
return callback(err)
} else {
@@ -56,7 +56,9 @@ module.exports = ResourceStateManager = {
})
} else {
logger.log({ state, basePath }, 'writing sync state')
const resourceList = Array.from(resources).map(resource => resource.path)
const resourceList = Array.from(resources).map(
(resource) => resource.path
)
return fs.writeFile(
stateFile,
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
@@ -67,11 +69,11 @@ module.exports = ResourceStateManager = {
checkProjectStateMatches(state, basePath, callback) {
if (callback == null) {
callback = function(error, resources) {}
callback = function (error, resources) {}
}
const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
const size = this.SYNC_STATE_MAX_SIZE
return SafeReader.readFile(stateFile, size, 'utf8', function(
return SafeReader.readFile(stateFile, size, 'utf8', function (
err,
result,
bytesRead
@@ -86,7 +88,7 @@ module.exports = ResourceStateManager = {
)
}
const array =
__guard__(result != null ? result.toString() : undefined, x =>
__guard__(result != null ? result.toString() : undefined, (x) =>
x.split('\n')
) || []
const adjustedLength = Math.max(array.length, 1)
@@ -102,7 +104,7 @@ module.exports = ResourceStateManager = {
new Errors.FilesOutOfSyncError('invalid state for incremental update')
)
} else {
const resources = Array.from(resourceList).map(path => ({ path }))
const resources = Array.from(resourceList).map((path) => ({ path }))
return callback(null, resources)
}
})
@@ -112,11 +114,11 @@ module.exports = ResourceStateManager = {
// check the paths are all relative to current directory
let file
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
for (file of Array.from(resources || [])) {
for (const dir of Array.from(
__guard__(file != null ? file.path : undefined, x => x.split('/'))
__guard__(file != null ? file.path : undefined, (x) => x.split('/'))
)) {
if (dir === '..') {
return callback(new Error('relative path in resource file list'))
@@ -129,8 +131,8 @@ module.exports = ResourceStateManager = {
seenFile[file] = true
}
const missingFiles = Array.from(resources)
.filter(resource => !seenFile[resource.path])
.map(resource => resource.path)
.filter((resource) => !seenFile[resource.path])
.map((resource) => resource.path)
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
logger.err(
{ missingFiles, basePath, allFiles, resources },

View File

@@ -30,7 +30,7 @@ const parallelFileDownloads = settings.parallelFileDownloads || 1
module.exports = ResourceWriter = {
syncResourcesToDisk(request, basePath, callback) {
if (callback == null) {
callback = function(error, resourceList) {}
callback = function (error, resourceList) {}
}
if (request.syncType === 'incremental') {
logger.log(
@@ -40,14 +40,14 @@ module.exports = ResourceWriter = {
return ResourceStateManager.checkProjectStateMatches(
request.syncState,
basePath,
function(error, resourceList) {
function (error, resourceList) {
if (error != null) {
return callback(error)
}
return ResourceWriter._removeExtraneousFiles(
resourceList,
basePath,
function(error, outputFiles, allFiles) {
function (error, outputFiles, allFiles) {
if (error != null) {
return callback(error)
}
@@ -55,7 +55,7 @@ module.exports = ResourceWriter = {
resourceList,
allFiles,
basePath,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -63,7 +63,7 @@ module.exports = ResourceWriter = {
request.project_id,
request.resources,
basePath,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -85,7 +85,7 @@ module.exports = ResourceWriter = {
request.project_id,
request.resources,
basePath,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -93,7 +93,7 @@ module.exports = ResourceWriter = {
request.syncState,
request.resources,
basePath,
function(error) {
function (error) {
if (error != null) {
return callback(error)
}
@@ -107,15 +107,15 @@ module.exports = ResourceWriter = {
saveIncrementalResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return this._createDirectory(basePath, error => {
return this._createDirectory(basePath, (error) => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
const jobs = Array.from(resources).map((resource) =>
((resource) => {
return (callback) =>
this._writeResourceToDisk(project_id, resource, basePath, callback)
})(resource)
)
@@ -125,19 +125,19 @@ module.exports = ResourceWriter = {
saveAllResourcesToDisk(project_id, resources, basePath, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return this._createDirectory(basePath, error => {
return this._createDirectory(basePath, (error) => {
if (error != null) {
return callback(error)
}
return this._removeExtraneousFiles(resources, basePath, error => {
return this._removeExtraneousFiles(resources, basePath, (error) => {
if (error != null) {
return callback(error)
}
const jobs = Array.from(resources).map(resource =>
(resource => {
return callback =>
const jobs = Array.from(resources).map((resource) =>
((resource) => {
return (callback) =>
this._writeResourceToDisk(
project_id,
resource,
@@ -153,9 +153,9 @@ module.exports = ResourceWriter = {
_createDirectory(basePath, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return fs.mkdir(basePath, function(err) {
return fs.mkdir(basePath, function (err) {
if (err != null) {
if (err.code === 'EEXIST') {
return callback()
@@ -171,15 +171,15 @@ module.exports = ResourceWriter = {
_removeExtraneousFiles(resources, basePath, _callback) {
if (_callback == null) {
_callback = function(error, outputFiles, allFiles) {}
_callback = function (error, outputFiles, allFiles) {}
}
const timer = new Metrics.Timer('unlink-output-files')
const callback = function(error, ...result) {
const callback = function (error, ...result) {
timer.done()
return _callback(error, ...Array.from(result))
}
return OutputFileFinder.findOutputFiles(resources, basePath, function(
return OutputFileFinder.findOutputFiles(resources, basePath, function (
error,
outputFiles,
allFiles
@@ -190,7 +190,7 @@ module.exports = ResourceWriter = {
const jobs = []
for (const file of Array.from(outputFiles || [])) {
;(function(file) {
;(function (file) {
const { path } = file
let should_delete = true
if (
@@ -242,7 +242,7 @@ module.exports = ResourceWriter = {
should_delete = true
}
if (should_delete) {
return jobs.push(callback =>
return jobs.push((callback) =>
ResourceWriter._deleteFileIfNotDirectory(
Path.join(basePath, path),
callback
@@ -252,7 +252,7 @@ module.exports = ResourceWriter = {
})(file)
}
return async.series(jobs, function(error) {
return async.series(jobs, function (error) {
if (error != null) {
return callback(error)
}
@@ -263,9 +263,9 @@ module.exports = ResourceWriter = {
_deleteFileIfNotDirectory(path, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return fs.stat(path, function(error, stat) {
return fs.stat(path, function (error, stat) {
if (error != null && error.code === 'ENOENT') {
return callback()
} else if (error != null) {
@@ -275,7 +275,7 @@ module.exports = ResourceWriter = {
)
return callback(error)
} else if (stat.isFile()) {
return fs.unlink(path, function(error) {
return fs.unlink(path, function (error) {
if (error != null) {
logger.err(
{ err: error, path },
@@ -294,16 +294,18 @@ module.exports = ResourceWriter = {
_writeResourceToDisk(project_id, resource, basePath, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return ResourceWriter.checkPath(basePath, resource.path, function(
return ResourceWriter.checkPath(basePath, resource.path, function (
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
return fs.mkdir(Path.dirname(path), { recursive: true }, function (
error
) {
if (error != null) {
return callback(error)
}
@@ -314,7 +316,7 @@ module.exports = ResourceWriter = {
resource.url,
path,
resource.modified,
function(err) {
function (err) {
if (err != null) {
logger.err(
{
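
The hunks in this file, like most of the commit, are whitespace-only: each function body is unchanged, and only the formatting of anonymous functions and arrow parameters differs. This matches the default output of Prettier 2.x, which is presumably what the updated dev-env brings in (an assumption; the formatter itself is not visible in this diff). A minimal sketch of the two rules:

// Old style: no space after the function keyword, bare single arrow parameters, e.g.
//   fs.stat('/tmp', function(err, stat) { ... })
//   files.map(file => file.path)
// New style: a space after the function keyword for anonymous functions,
// and single arrow parameters always wrapped in parentheses:
const fs = require('fs')

fs.stat('/tmp', function (err, stat) {
  if (err == null) console.log(stat.isDirectory())
})
const paths = [{ path: 'a.tex' }, { path: 'b.tex' }].map((file) => file.path)
console.log(paths)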

View File

@@ -22,9 +22,9 @@ module.exports = SafeReader = {
readFile(file, size, encoding, callback) {
if (callback == null) {
callback = function(error, result) {}
callback = function (error, result) {}
}
return fs.open(file, 'r', function(err, fd) {
return fs.open(file, 'r', function (err, fd) {
if (err != null && err.code === 'ENOENT') {
return callback()
}
@@ -34,7 +34,7 @@ module.exports = SafeReader = {
// safely return always closing the file
const callbackWithClose = (err, ...result) =>
fs.close(fd, function(err1) {
fs.close(fd, function (err1) {
if (err != null) {
return callback(err)
}
@@ -44,7 +44,7 @@ module.exports = SafeReader = {
return callback(null, ...Array.from(result))
})
const buff = Buffer.alloc(size) // fills with zeroes by default
return fs.read(fd, buff, 0, buff.length, 0, function(
return fs.read(fd, buff, 0, buff.length, 0, function (
err,
bytesRead,
buffer
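
SafeReader.readFile above reads at most size bytes from the start of a file into a pre-allocated buffer and always closes the descriptor via the callbackWithClose wrapper. A minimal sketch of the same bounded-read idea using fs.promises, purely as an illustration rather than the project's API:

const fsPromises = require('fs').promises

async function readPrefix(file, size, encoding = 'utf8') {
  let handle
  try {
    handle = await fsPromises.open(file, 'r')
  } catch (err) {
    if (err.code === 'ENOENT') return null // missing file is treated as "no content"
    throw err
  }
  try {
    const buff = Buffer.alloc(size) // zero-filled by default, as noted above
    const { bytesRead } = await handle.read(buff, 0, buff.length, 0)
    return buff.slice(0, bytesRead).toString(encoding)
  } finally {
    await handle.close() // the descriptor is always released, like callbackWithClose
  }
}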

View File

@@ -21,12 +21,12 @@ const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
const url = require('url')
module.exports = ForbidSymlinks = function(staticFn, root, options) {
module.exports = ForbidSymlinks = function (staticFn, root, options) {
const expressStatic = staticFn(root, options)
const basePath = Path.resolve(root)
return function(req, res, next) {
return function (req, res, next) {
let file, project_id, result
const path = __guard__(url.parse(req.url), x => x.pathname)
const path = __guard__(url.parse(req.url), (x) => x.pathname)
// check that the path is of the form /project_id_or_name/path/to/file.log
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
project_id = result[1]
@@ -52,7 +52,7 @@ module.exports = ForbidSymlinks = function(staticFn, root, options) {
return res.sendStatus(404)
}
// check that the requested path is not a symlink
return fs.realpath(requestedFsPath, function(err, realFsPath) {
return fs.realpath(requestedFsPath, function (err, realFsPath) {
if (err != null) {
if (err.code === 'ENOENT') {
return res.sendStatus(404)
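
The middleware above resolves the requested file with fs.realpath and refuses to serve anything that resolves through a symlink; the comparison itself sits outside this hunk, so the sketch below illustrates the idea under that assumption rather than reproducing the project's exact check:

const fs = require('fs')
const Path = require('path')

function checkNotSymlink(basePath, relativePath, callback) {
  // basePath corresponds to Path.resolve(root) in the middleware above
  const requestedFsPath = Path.normalize(Path.join(basePath, relativePath))
  fs.realpath(requestedFsPath, function (err, realFsPath) {
    if (err != null && err.code === 'ENOENT') return callback(null, false) // -> 404
    if (err != null) return callback(err)
    // if any path component is a symlink, realpath() returns a different string
    return callback(null, realFsPath === requestedFsPath)
  })
}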

View File

@@ -26,7 +26,7 @@ module.exports = TikzManager = {
checkMainFile(compileDir, mainFile, resources, callback) {
// if there's already an output.tex file, we don't want to touch it
if (callback == null) {
callback = function(error, needsMainFile) {}
callback = function (error, needsMainFile) {}
}
for (const resource of Array.from(resources)) {
if (resource.path === 'output.tex') {
@@ -35,14 +35,17 @@ module.exports = TikzManager = {
}
}
// if there's no output.tex, see if we are using tikz/pgf or pstool in the main file
return ResourceWriter.checkPath(compileDir, mainFile, function(
return ResourceWriter.checkPath(compileDir, mainFile, function (
error,
path
) {
if (error != null) {
return callback(error)
}
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
return SafeReader.readFile(path, 65536, 'utf8', function (
error,
content
) {
if (error != null) {
return callback(error)
}
@@ -64,16 +67,16 @@ module.exports = TikzManager = {
injectOutputFile(compileDir, mainFile, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return ResourceWriter.checkPath(compileDir, mainFile, function(
return ResourceWriter.checkPath(compileDir, mainFile, function (
error,
path
) {
if (error != null) {
return callback(error)
}
return fs.readFile(path, 'utf8', function(error, content) {
return fs.readFile(path, 'utf8', function (error, content) {
if (error != null) {
return callback(error)
}
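
checkMainFile reads at most 64 KB of the main file through SafeReader and looks for tikz/pgf or pstool usage before deciding whether to inject output.tex; the actual patterns it searches for are outside these hunks. A hypothetical content test of that shape:

// Hypothetical check; the real pattern list lives in TikzManager and is not
// shown in this diff:
function usesTikzOrPstool(content) {
  if (content == null) return false
  return /\\usepackage(\[[^\]]*\])?\{(tikz|pstool)\}/.test(content)
}

// e.g. usesTikzOrPstool('\\usepackage{tikz}') // true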

View File

@@ -25,7 +25,7 @@ const async = require('async')
module.exports = UrlCache = {
downloadUrlToFile(project_id, url, destPath, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return UrlCache._ensureUrlIsInCache(
project_id,
@@ -35,7 +35,7 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
return UrlCache._copyFile(pathToCachedUrl, destPath, function(error) {
return UrlCache._copyFile(pathToCachedUrl, destPath, function (error) {
if (error != null) {
return UrlCache._clearUrlDetails(project_id, url, () =>
callback(error)
@@ -50,9 +50,9 @@ module.exports = UrlCache = {
clearProject(project_id, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return UrlCache._findAllUrlsInProject(project_id, function(error, urls) {
return UrlCache._findAllUrlsInProject(project_id, function (error, urls) {
logger.log(
{ project_id, url_count: urls.length },
'clearing project URLs'
@@ -60,9 +60,9 @@ module.exports = UrlCache = {
if (error != null) {
return callback(error)
}
const jobs = Array.from(urls || []).map(url =>
(url => callback =>
UrlCache._clearUrlFromCache(project_id, url, function(error) {
const jobs = Array.from(urls || []).map((url) =>
((url) => (callback) =>
UrlCache._clearUrlFromCache(project_id, url, function (error) {
if (error != null) {
logger.error(
{ err: error, project_id, url },
@@ -78,7 +78,7 @@ module.exports = UrlCache = {
_ensureUrlIsInCache(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, pathOnDisk) {}
callback = function (error, pathOnDisk) {}
}
if (lastModified != null) {
// MYSQL only stores dates to an accuracy of a second but the incoming lastModified might have milliseconds.
@@ -98,7 +98,7 @@ module.exports = UrlCache = {
return UrlFetcher.pipeUrlToFileWithRetry(
url,
UrlCache._cacheFilePathForUrl(project_id, url),
error => {
(error) => {
if (error != null) {
return callback(error)
}
@@ -106,7 +106,7 @@ module.exports = UrlCache = {
project_id,
url,
lastModified,
error => {
(error) => {
if (error != null) {
return callback(error)
}
@@ -128,12 +128,12 @@ module.exports = UrlCache = {
_doesUrlNeedDownloading(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error, needsDownloading) {}
callback = function (error, needsDownloading) {}
}
if (lastModified == null) {
return callback(null, true)
}
return UrlCache._findUrlDetails(project_id, url, function(
return UrlCache._findUrlDetails(project_id, url, function (
error,
urlDetails
) {
@@ -153,14 +153,7 @@ module.exports = UrlCache = {
},
_cacheFileNameForUrl(project_id, url) {
return (
project_id +
':' +
crypto
.createHash('md5')
.update(url)
.digest('hex')
)
return project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
},
_cacheFilePathForUrl(project_id, url) {
@@ -172,14 +165,14 @@ module.exports = UrlCache = {
_copyFile(from, to, _callback) {
if (_callback == null) {
_callback = function(error) {}
_callback = function (error) {}
}
const callbackOnce = function(error) {
const callbackOnce = function (error) {
if (error != null) {
logger.error({ err: error, from, to }, 'error copying file from cache')
}
_callback(error)
return (_callback = function() {})
return (_callback = function () {})
}
const writeStream = fs.createWriteStream(to)
const readStream = fs.createReadStream(from)
@@ -191,13 +184,15 @@ module.exports = UrlCache = {
_clearUrlFromCache(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return UrlCache._clearUrlDetails(project_id, url, function(error) {
return UrlCache._clearUrlDetails(project_id, url, function (error) {
if (error != null) {
return callback(error)
}
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
error
) {
if (error != null) {
return callback(error)
}
@@ -208,9 +203,9 @@ module.exports = UrlCache = {
_deleteUrlCacheFromDisk(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function(
return fs.unlink(UrlCache._cacheFilePathForUrl(project_id, url), function (
error
) {
if (error != null && error.code !== 'ENOENT') {
@@ -224,20 +219,20 @@ module.exports = UrlCache = {
_findUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error, urlDetails) {}
callback = function (error, urlDetails) {}
}
const job = cb =>
const job = (cb) =>
db.UrlCache.findOne({ where: { url, project_id } })
.then(urlDetails => cb(null, urlDetails))
.then((urlDetails) => cb(null, urlDetails))
.error(cb)
return dbQueue.queue.push(job, callback)
},
_updateOrCreateUrlDetails(project_id, url, lastModified, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const job = cb =>
const job = (cb) =>
db.UrlCache.findOrCreate({ where: { url, project_id } })
.spread((urlDetails, created) =>
urlDetails
@@ -251,9 +246,9 @@ module.exports = UrlCache = {
_clearUrlDetails(project_id, url, callback) {
if (callback == null) {
callback = function(error) {}
callback = function (error) {}
}
const job = cb =>
const job = (cb) =>
db.UrlCache.destroy({ where: { url, project_id } })
.then(() => cb(null))
.error(cb)
@@ -262,14 +257,14 @@ module.exports = UrlCache = {
_findAllUrlsInProject(project_id, callback) {
if (callback == null) {
callback = function(error, urls) {}
callback = function (error, urls) {}
}
const job = cb =>
const job = (cb) =>
db.UrlCache.findAll({ where: { project_id } })
.then(urlEntries =>
.then((urlEntries) =>
cb(
null,
urlEntries.map(entry => entry.url)
urlEntries.map((entry) => entry.url)
)
)
.error(cb)
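
The only hunk in this file that changes more than whitespace is _cacheFileNameForUrl, and even there the expression is merely reflowed onto one line now that it fits the formatter's print width. The cache key is the project id joined to an MD5 digest of the URL:

const crypto = require('crypto')

function cacheFileNameForUrl(projectId, url) {
  return projectId + ':' + crypto.createHash('md5').update(url).digest('hex')
}

// e.g. cacheFileNameForUrl('project123', 'https://example.com/figure.png')
//
// The _doesUrlNeedDownloading comparison presumably rounds lastModified down
// to whole seconds first (per the MYSQL comment above), e.g.
//   new Date(Math.floor(lastModified.getTime() / 1000) * 1000)
// This is an assumption: that code falls outside the hunks shown here.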

View File

@@ -24,7 +24,7 @@ const oneMinute = 60 * 1000
module.exports = UrlFetcher = {
pipeUrlToFileWithRetry(url, filePath, callback) {
const doDownload = function(cb) {
const doDownload = function (cb) {
UrlFetcher.pipeUrlToFile(url, filePath, cb)
}
async.retry(3, doDownload, callback)
@@ -32,14 +32,14 @@ module.exports = UrlFetcher = {
pipeUrlToFile(url, filePath, _callback) {
if (_callback == null) {
_callback = function(error) {}
_callback = function (error) {}
}
const callbackOnce = function(error) {
const callbackOnce = function (error) {
if (timeoutHandler != null) {
clearTimeout(timeoutHandler)
}
_callback(error)
return (_callback = function() {})
return (_callback = function () {})
}
if (settings.filestoreDomainOveride != null) {
@@ -47,7 +47,7 @@ module.exports = UrlFetcher = {
url = `${settings.filestoreDomainOveride}${p}`
}
var timeoutHandler = setTimeout(
function() {
function () {
timeoutHandler = null
logger.error({ url, filePath }, 'Timed out downloading file to cache')
return callbackOnce(
@@ -63,7 +63,7 @@ module.exports = UrlFetcher = {
urlStream.pause() // stop data flowing until we are ready
// attach handlers before setting up pipes
urlStream.on('error', function(error) {
urlStream.on('error', function (error) {
logger.error({ err: error, url, filePath }, 'error downloading url')
return callbackOnce(
error || new Error(`Something went wrong downloading the URL ${url}`)
@@ -74,17 +74,17 @@ module.exports = UrlFetcher = {
logger.log({ url, filePath }, 'finished downloading file into cache')
)
return urlStream.on('response', function(res) {
return urlStream.on('response', function (res) {
if (res.statusCode >= 200 && res.statusCode < 300) {
const fileStream = fs.createWriteStream(filePath)
// attach handlers before setting up pipes
fileStream.on('error', function(error) {
fileStream.on('error', function (error) {
logger.error(
{ err: error, url, filePath },
'error writing file into cache'
)
return fs.unlink(filePath, function(err) {
return fs.unlink(filePath, function (err) {
if (err != null) {
logger.err({ err, filePath }, 'error deleting file from cache')
}
@@ -92,7 +92,7 @@ module.exports = UrlFetcher = {
})
})
fileStream.on('finish', function() {
fileStream.on('finish', function () {
logger.log({ url, filePath }, 'finished writing file into cache')
return callbackOnce()
})
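
pipeUrlToFile guards completion with callbackOnce (clear the timeout, then neutralise _callback) and the whole download is retried up to three times via async.retry. A condensed sketch of that shape using Node's built-in https module in place of the request stream used above; the function name and the 60-second timeout are illustrative:

const async = require('async')
const fs = require('fs')
const https = require('https')

function downloadWithRetry(url, filePath, callback) {
  const attempt = function (cb) {
    let finished = false
    const once = function (error) {
      if (finished) return // "callbackOnce": later completions are ignored
      finished = true
      clearTimeout(timer)
      cb(error)
    }
    const timer = setTimeout(function () {
      once(new Error(`Timed out downloading ${url}`))
    }, 60 * 1000)
    https
      .get(url, function (res) {
        if (res.statusCode < 200 || res.statusCode >= 300) {
          res.resume() // drain and discard the body
          return once(new Error(`URL returned status code ${res.statusCode}`))
        }
        const fileStream = fs.createWriteStream(filePath)
        // attach handlers before setting up pipes, as above
        fileStream.on('error', once)
        fileStream.on('finish', function () {
          once(null)
        })
        res.pipe(fileStream)
      })
      .on('error', once)
  }
  async.retry(3, attempt, callback)
}

A production version would also abort the in-flight request when the timeout fires; that detail is omitted from this sketch.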

View File

@@ -62,6 +62,6 @@ module.exports = {
return sequelize
.sync()
.then(() => logger.log('db sync complete'))
.catch(err => console.log(err, 'error syncing'))
.catch((err) => console.log(err, 'error syncing'))
}
}
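
For reference, the same start-up sync expressed with async/await; this is an illustration only, the module above keeps the promise chain:

async function syncDatabase(sequelize, logger) {
  try {
    await sequelize.sync() // creates any missing tables for the defined models
    logger.log('db sync complete')
  } catch (err) {
    console.log(err, 'error syncing')
  }
}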