prettier: convert individual decaffeinated files to Prettier format

This commit is contained in:
mserranom
2020-02-19 12:16:18 +01:00
parent 62d20ee5f0
commit de36ab663c
2 changed files with 388 additions and 304 deletions

519
app.js
View File

@@ -5,294 +5,367 @@
* DS207: Consider shorter variations of null checks * DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/ */
let tenMinutes
const Metrics = require('metrics-sharelatex')
Metrics.initialize('clsi')
const CompileController = require('./app/js/CompileController')
const Settings = require('settings-sharelatex')
const logger = require('logger-sharelatex')
logger.initialize('clsi')
// Only wire up Sentry error reporting when a DSN is actually configured
if ((Settings.sentry != null ? Settings.sentry.dsn : undefined) != null) {
  logger.initializeErrorReporting(Settings.sentry.dsn)
}
const smokeTest = require('smoke-test-sharelatex')
const ContentTypeMapper = require('./app/js/ContentTypeMapper')
const Errors = require('./app/js/Errors')
const Path = require('path')
const fs = require('fs')

Metrics.open_sockets.monitor(logger)
Metrics.memory.monitor(logger)

const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager')
const OutputCacheManager = require('./app/js/OutputCacheManager')

// Make sure the sqlite schema exists before any request handling starts
require('./app/js/db').sync()

const express = require('express')
const bodyParser = require('body-parser')
const app = express()

Metrics.injectMetricsRoute(app)
app.use(Metrics.http.monitor(logger))
// Compile requests can take longer than the default two
// minutes (including file download time), so bump up the
// timeout a bit.
const TIMEOUT = 10 * 60 * 1000
app.use(function(req, res, next) {
  req.setTimeout(TIMEOUT)
  res.setTimeout(TIMEOUT)
  res.removeHeader('X-Powered-By')
  return next()
})
// Reject malformed route parameters up front so handlers can assume
// they are well-formed (and safe to use in filesystem paths).
app.param('project_id', function(req, res, next, project_id) {
  if (project_id != null ? project_id.match(/^[a-zA-Z0-9_-]+$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid project id'))
  }
})

app.param('user_id', function(req, res, next, user_id) {
  // user ids are 24-char hex mongo object ids
  if (user_id != null ? user_id.match(/^[0-9a-f]{24}$/) : undefined) {
    return next()
  } else {
    return next(new Error('invalid user id'))
  }
})

app.param('build_id', function(req, res, next, build_id) {
  if (
    build_id != null
      ? build_id.match(OutputCacheManager.BUILD_REGEX)
      : undefined
  ) {
    return next()
  } else {
    return next(new Error(`invalid build id ${build_id}`))
  }
})
// Project-level compile endpoints
app.post(
  '/project/:project_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post('/project/:project_id/compile/stop', CompileController.stopCompile)
app.delete('/project/:project_id', CompileController.clearCache)

app.get('/project/:project_id/sync/code', CompileController.syncFromCode)
app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf)
app.get('/project/:project_id/wordcount', CompileController.wordcount)
app.get('/project/:project_id/status', CompileController.status)

// Per-user containers
app.post(
  '/project/:project_id/user/:user_id/compile',
  bodyParser.json({ limit: Settings.compileSizeLimit }),
  CompileController.compile
)
app.post(
  '/project/:project_id/user/:user_id/compile/stop',
  CompileController.stopCompile
)
app.delete('/project/:project_id/user/:user_id', CompileController.clearCache)

app.get(
  '/project/:project_id/user/:user_id/sync/code',
  CompileController.syncFromCode
)
app.get(
  '/project/:project_id/user/:user_id/sync/pdf',
  CompileController.syncFromPdf
)
app.get(
  '/project/:project_id/user/:user_id/wordcount',
  CompileController.wordcount
)
const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks')

// create a static server which does not allow access to any symlinks
// avoids possible mismatch of root directory between middleware check
// and serving the files
const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
  setHeaders(res, path, stat) {
    if (Path.basename(path) === 'output.pdf') {
      // Calculate an etag in the same way as nginx
      // https://github.com/tj/send/issues/65
      const etag = (path, stat) =>
        `"${Math.ceil(+stat.mtime / 1000).toString(16)}` +
        '-' +
        Number(stat.size).toString(16) +
        '"'
      res.set('Etag', etag(path, stat))
    }
    return res.set('Content-Type', ContentTypeMapper.map(path))
  }
})
// Output-file routes: rewrite req.url to the on-disk layout under the
// compiles dir, then delegate to the symlink-safe static server.
app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}-${req.params.user_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
})

app.get('/project/:project_id/build/:build_id/output/*', function(
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
})

app.get('/project/:project_id/user/:user_id/output/*', function(
  req,
  res,
  next
) {
  // for specific user get the path to the top level file
  req.url = `/${req.params.project_id}-${req.params.user_id}/${req.params[0]}`
  return staticServer(req, res, next)
})

app.get('/project/:project_id/output/*', function(req, res, next) {
  if (
    (req.query != null ? req.query.build : undefined) != null &&
    req.query.build.match(OutputCacheManager.BUILD_REGEX)
  ) {
    // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
    req.url =
      `/${req.params.project_id}/` +
      OutputCacheManager.path(req.query.build, `/${req.params[0]}`)
  } else {
    req.url = `/${req.params.project_id}/${req.params[0]}`
  }
  return staticServer(req, res, next)
})
// Debug endpoint: emits a test error into the logs
app.get('/oops', function(req, res, next) {
  logger.error({ err: 'hello' }, 'test error')
  return res.send('error\n')
})

// Liveness probe
app.get('/status', (req, res, next) => res.send('CLSI is alive\n'))
// A fake express response object passed to the periodic smoke test.
// It caches the last smoke test outcome so /health_check can serve it
// without re-running the tests.
const resCacher = {
  contentType(setContentType) {
    this.setContentType = setContentType
  },
  send(code, body) {
    this.code = code
    this.body = body
  },
  // default the server to be down
  code: 500,
  body: {},
  setContentType: 'application/json'
}
if (Settings.smokeTest) {
  let runSmokeTest
  // Re-run the smoke tests every 30s, storing the result in resCacher.
  // The leading ';' guards against ASI joining this IIFE to the previous line.
  ;(runSmokeTest = function() {
    logger.log('running smoke tests')
    smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
      {},
      resCacher
    )
    return setTimeout(runSmokeTest, 30 * 1000)
  })()
}
// Serve the most recent cached smoke test result (see resCacher above)
app.get('/health_check', function(req, res) {
  res.contentType(resCacher != null ? resCacher.setContentType : undefined)
  return res
    .status(resCacher != null ? resCacher.code : undefined)
    .send(resCacher != null ? resCacher.body : undefined)
})

// Run the smoke tests synchronously for this request, bypassing the cache
app.get('/smoke_test_force', (req, res) =>
  smokeTest.run(require.resolve(__dirname + '/test/smoke/js/SmokeTests.js'))(
    req,
    res
  )
)
const profiler = require('v8-profiler-node8')
// CPU-profile the process for ?time=<ms> (default 1s) and return the profile
app.get('/profile', function(req, res) {
  const time = parseInt(req.query.time || '1000', 10) // explicit radix
  profiler.startProfiling('test')
  return setTimeout(function() {
    const profile = profiler.stopProfiling('test')
    return res.json(profile)
  }, time)
})

// Write a V8 heap snapshot to /tmp and return its filename
app.get('/heapdump', (req, res) =>
  require('heapdump').writeSnapshot(
    `/tmp/${Date.now()}.clsi.heapsnapshot`,
    (err, filename) => res.send(filename)
  )
)
// Global error handler: expected "not found" errors are logged at warn
// level and mapped to 404; everything else is a server error.
app.use(function(error, req, res, next) {
  if (error instanceof Errors.NotFoundError) {
    logger.warn({ err: error, url: req.url }, 'not found error')
    return res.sendStatus(404)
  } else {
    logger.error({ err: error, url: req.url }, 'server error')
    return res.sendStatus((error != null ? error.statusCode : undefined) || 500)
  }
})
const net = require('net')
const os = require('os')

// Current server state as reported to the load balancer: 'up', 'down' or
// 'maint'; mutated by the loadHttpServer /state/* endpoints below.
let STATE = 'up'
const loadTcpServer = net.createServer(function(socket) {
  socket.on('error', function(err) {
    if (err.code === 'ECONNRESET') {
      // this always comes up, we don't know why
      return
    }
    logger.err({ err }, 'error with socket on load check')
    return socket.destroy()
  })

  if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
    let availableWorkingCpus
    const currentLoad = os.loadavg()[0]

    // staging clis's have 1 cpu core only
    if (os.cpus().length === 1) {
      availableWorkingCpus = 1
    } else {
      availableWorkingCpus = os.cpus().length - 1
    }

    const freeLoad = availableWorkingCpus - currentLoad
    let freeLoadPercentage = Math.round((freeLoad / availableWorkingCpus) * 100)
    if (freeLoadPercentage <= 0) {
      freeLoadPercentage = 1 // when its 0 the server is set to drain and will move projects to different servers
    }
    socket.write(`up, ${freeLoadPercentage}%\n`, 'ASCII')
    return socket.end()
  } else {
    socket.write(`${STATE}\n`, 'ASCII')
    return socket.end()
  }
})
// Small side-channel HTTP server the load balancer agent uses to flip STATE
const loadHttpServer = express()

loadHttpServer.post('/state/up', function(req, res, next) {
  STATE = 'up'
  // fixed copy-paste bug: this handler previously logged "down"
  logger.info('getting message to set server to up')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/down', function(req, res, next) {
  STATE = 'down'
  logger.info('getting message to set server to down')
  return res.sendStatus(204)
})

loadHttpServer.post('/state/maint', function(req, res, next) {
  STATE = 'maint'
  logger.info('getting message to set server to maint')
  return res.sendStatus(204)
})
// Listen address/ports, with fallbacks when Settings.internal.clsi is absent
const port =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    x => x.port
  ) || 3013
const host =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    x1 => x1.host
  ) || 'localhost'

const load_tcp_port = Settings.internal.load_balancer_agent.load_port
const load_http_port = Settings.internal.load_balancer_agent.local_port

if (!module.parent) {
  // Called directly
  app.listen(port, host, error =>
    logger.info(`CLSI starting up, listening on ${host}:${port}`)
  )

  loadTcpServer.listen(load_tcp_port, host, function(error) {
    if (error != null) {
      throw error
    }
    return logger.info(`Load tcp agent listening on load port ${load_tcp_port}`)
  })

  loadHttpServer.listen(load_http_port, host, function(error) {
    if (error != null) {
      throw error
    }
    return logger.info(
      `Load http agent listening on load port ${load_http_port}`
    )
  })
}
module.exports = app

// Periodically evict expired projects from the local compile cache
setInterval(
  () => ProjectPersistenceManager.clearExpiredProjects(),
  (tenMinutes = 10 * 60 * 1000)
)
// Null-safe accessor helper emitted by decaffeinate: applies `transform`
// to `value` unless it is null or undefined, in which case it yields
// undefined (mirrors CoffeeScript's `?.` soak).
function __guard__(value, transform) {
  return typeof value !== 'undefined' && value !== null
    ? transform(value)
    : undefined
}

View File

@@ -1,89 +1,100 @@
const Path = require("path"); const Path = require('path')
module.exports = { module.exports = {
// Options are passed to Sequelize. // Options are passed to Sequelize.
// See http://sequelizejs.com/documentation#usage-options for details // See http://sequelizejs.com/documentation#usage-options for details
mysql: { mysql: {
clsi: { clsi: {
database: "clsi", database: 'clsi',
username: "clsi", username: 'clsi',
dialect: "sqlite", dialect: 'sqlite',
storage: process.env["SQLITE_PATH"] || Path.resolve(__dirname + "/../db.sqlite"), storage:
pool: { process.env.SQLITE_PATH || Path.resolve(__dirname + '/../db.sqlite'),
max: 1, pool: {
min: 1 max: 1,
}, min: 1
retry: { },
max: 10 retry: {
} max: 10
} }
}, }
},
compileSizeLimit: process.env["COMPILE_SIZE_LIMIT"] || "7mb", compileSizeLimit: process.env.COMPILE_SIZE_LIMIT || '7mb',
path: {
compilesDir: Path.resolve(__dirname + "/../compiles"),
clsiCacheDir: Path.resolve(__dirname + "/../cache"),
synctexBaseDir(project_id) { return Path.join(this.compilesDir, project_id); }
},
internal: { path: {
clsi: { compilesDir: Path.resolve(__dirname + '/../compiles'),
port: 3013, clsiCacheDir: Path.resolve(__dirname + '/../cache'),
host: process.env["LISTEN_ADDRESS"] || "localhost" synctexBaseDir(project_id) {
}, return Path.join(this.compilesDir, project_id)
}
load_balancer_agent: { },
report_load:true,
load_port: 3048,
local_port: 3049
}
},
apis: {
clsi: {
url: `http://${process.env['CLSI_HOST'] || 'localhost'}:3013`
}
},
internal: {
smokeTest: process.env["SMOKE_TEST"] || false, clsi: {
project_cache_length_ms: 1000 * 60 * 60 * 24, port: 3013,
parallelFileDownloads: process.env["FILESTORE_PARALLEL_FILE_DOWNLOADS"] || 1, host: process.env.LISTEN_ADDRESS || 'localhost'
parallelSqlQueryLimit: process.env["FILESTORE_PARALLEL_SQL_QUERY_LIMIT"] || 1, },
filestoreDomainOveride: process.env["FILESTORE_DOMAIN_OVERRIDE"],
texliveImageNameOveride: process.env["TEX_LIVE_IMAGE_NAME_OVERRIDE"],
sentry: {
dsn: process.env['SENTRY_DSN']
}
};
load_balancer_agent: {
report_load: true,
load_port: 3048,
local_port: 3049
}
},
apis: {
clsi: {
url: `http://${process.env.CLSI_HOST || 'localhost'}:3013`
}
},
if (process.env["DOCKER_RUNNER"]) { smokeTest: process.env.SMOKE_TEST || false,
let seccomp_profile_path; project_cache_length_ms: 1000 * 60 * 60 * 24,
module.exports.clsi = { parallelFileDownloads: process.env.FILESTORE_PARALLEL_FILE_DOWNLOADS || 1,
dockerRunner: process.env["DOCKER_RUNNER"] === "true", parallelSqlQueryLimit: process.env.FILESTORE_PARALLEL_SQL_QUERY_LIMIT || 1,
docker: { filestoreDomainOveride: process.env.FILESTORE_DOMAIN_OVERRIDE,
image: process.env["TEXLIVE_IMAGE"] || "quay.io/sharelatex/texlive-full:2017.1", texliveImageNameOveride: process.env.TEX_LIVE_IMAGE_NAME_OVERRIDE,
env: { sentry: {
HOME: "/tmp" dsn: process.env.SENTRY_DSN
}, }
socketPath: "/var/run/docker.sock", }
user: process.env["TEXLIVE_IMAGE_USER"] || "tex"
}, if (process.env.DOCKER_RUNNER) {
expireProjectAfterIdleMs: 24 * 60 * 60 * 1000, let seccomp_profile_path
checkProjectsIntervalMs: 10 * 60 * 1000 module.exports.clsi = {
}; dockerRunner: process.env.DOCKER_RUNNER === 'true',
docker: {
try { image:
seccomp_profile_path = Path.resolve(__dirname + "/../seccomp/clsi-profile.json"); process.env.TEXLIVE_IMAGE ||
module.exports.clsi.docker.seccomp_profile = JSON.stringify(JSON.parse(require("fs").readFileSync(seccomp_profile_path))); 'quay.io/sharelatex/texlive-full:2017.1',
} catch (error) { env: {
console.log(error, `could not load seccom profile from ${seccomp_profile_path}`); HOME: '/tmp'
} },
socketPath: '/var/run/docker.sock',
module.exports.path.synctexBaseDir = () => "/compile"; user: process.env.TEXLIVE_IMAGE_USER || 'tex'
},
module.exports.path.sandboxedCompilesHostDir = process.env["COMPILES_HOST_DIR"]; expireProjectAfterIdleMs: 24 * 60 * 60 * 1000,
checkProjectsIntervalMs: 10 * 60 * 1000
module.exports.path.synctexBinHostPath = process.env["SYNCTEX_BIN_HOST_PATH"]; }
try {
seccomp_profile_path = Path.resolve(
__dirname + '/../seccomp/clsi-profile.json'
)
module.exports.clsi.docker.seccomp_profile = JSON.stringify(
JSON.parse(require('fs').readFileSync(seccomp_profile_path))
)
} catch (error) {
console.log(
error,
`could not load seccom profile from ${seccomp_profile_path}`
)
}
module.exports.path.synctexBaseDir = () => '/compile'
module.exports.path.sandboxedCompilesHostDir =
process.env.COMPILES_HOST_DIR
module.exports.path.synctexBinHostPath = process.env.SYNCTEX_BIN_HOST_PATH
} }