Compare commits
1 commit
master...csh-issue-

| Author | SHA1 | Date |
|---|---|---|
| | 3ce68b44d2 | |
@@ -8,7 +8,7 @@
    "prettier/standard"
  ],
  "parserOptions": {
    "ecmaVersion": 2018
    "ecmaVersion": 2017
  },
  "plugins": [
    "mocha",
.github/dependabot.yml (vendored, 23 lines changed)
@@ -1,23 +0,0 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"

    pull-request-branch-name:
      # Separate sections of the branch name with a hyphen
      # Docker images use the branch name and do not support slashes in tags
      # https://github.com/overleaf/google-ops/issues/822
      # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#pull-request-branch-nameseparator
      separator: "-"

    # Block informal upgrades -- security upgrades use a separate queue.
    # https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#open-pull-requests-limit
    open-pull-requests-limit: 0

    # currently assign team-magma to all dependabot PRs - this may change in
    # future if we reorganise teams
    labels:
      - "dependencies"
      - "Team-Magma"
.gitignore (vendored, 3 lines changed)
@@ -11,6 +11,3 @@ db.sqlite-wal
db.sqlite-shm
config/*
npm-debug.log

# managed by dev-environment$ bin/update_build_scripts
.npmrc
@@ -2,7 +2,7 @@
# Instead run bin/update_build_scripts from
# https://github.com/sharelatex/sharelatex-dev-environment

FROM node:10.22.1 as base
FROM node:10.21.0 as base

WORKDIR /app
COPY install_deps.sh /app
@@ -15,10 +15,12 @@ FROM base as app
#wildcard as some files may not be in all repos
COPY package*.json npm-shrink*.json /app/

RUN npm ci --quiet
RUN npm install --quiet

COPY . /app

FROM base

COPY --from=app /app /app
Jenkinsfile (vendored, new file, 131 lines)
@@ -0,0 +1,131 @@
String cron_string = BRANCH_NAME == "master" ? "@daily" : ""

pipeline {
  agent any

  environment {
    GIT_PROJECT = "clsi"
    JENKINS_WORKFLOW = "clsi-sharelatex"
    TARGET_URL = "${env.JENKINS_URL}blue/organizations/jenkins/${JENKINS_WORKFLOW}/detail/$BRANCH_NAME/$BUILD_NUMBER/pipeline"
    GIT_API_URL = "https://api.github.com/repos/overleaf/${GIT_PROJECT}/statuses/$GIT_COMMIT"
  }

  triggers {
    pollSCM('* * * * *')
    cron(cron_string)
  }

  stages {

    stage('Install') {
      steps {
        withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
          sh "curl $GIT_API_URL \
            --data '{ \
            \"state\" : \"pending\", \
            \"target_url\": \"$TARGET_URL\", \
            \"description\": \"Your build is underway\", \
            \"context\": \"ci/jenkins\" }' \
            -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
        }
      }
    }

    stage('Build') {
      steps {
        sh 'make build'
      }
    }

    stage('Linting') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format'
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint'
      }
    }

    stage('Unit Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit'
      }
    }

    stage('Acceptance Tests') {
      steps {
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance'
      }
    }

    stage('Package and docker push') {
      steps {
        sh 'echo ${BUILD_NUMBER} > build_number.txt'
        sh 'touch build.tar.gz' // Avoid tar warning about files changing during read
        sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make tar'

        withCredentials([file(credentialsId: 'gcr.io_overleaf-ops', variable: 'DOCKER_REPO_KEY_PATH')]) {
          sh 'docker login -u _json_key --password-stdin https://gcr.io/overleaf-ops < ${DOCKER_REPO_KEY_PATH}'
        }
        sh 'DOCKER_REPO=gcr.io/overleaf-ops make publish'
        sh 'docker logout https://gcr.io/overleaf-ops'

      }
    }

    stage('Publish to s3') {
      steps {
        sh 'echo ${BRANCH_NAME}-${BUILD_NUMBER} > build_number.txt'
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          s3Upload(file:'build.tar.gz', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/${BUILD_NUMBER}.tar.gz")
        }
        withAWS(credentials:'S3_CI_BUILDS_AWS_KEYS', region:"${S3_REGION_BUILD_ARTEFACTS}") {
          // The deployment process uses this file to figure out the latest build
          s3Upload(file:'build_number.txt', bucket:"${S3_BUCKET_BUILD_ARTEFACTS}", path:"${JOB_NAME}/latest")
        }
      }
    }
  }

  post {
    always {
      sh 'DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_clean'
      sh 'make clean'
    }

    success {
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"success\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build succeeded!\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }

    failure {
      mail(from: "${EMAIL_ALERT_FROM}",
           to: "${EMAIL_ALERT_TO}",
           subject: "Jenkins build failed: ${JOB_NAME}:${BUILD_NUMBER}",
           body: "Build: ${BUILD_URL}")
      withCredentials([usernamePassword(credentialsId: 'GITHUB_INTEGRATION', usernameVariable: 'GH_AUTH_USERNAME', passwordVariable: 'GH_AUTH_PASSWORD')]) {
        sh "curl $GIT_API_URL \
          --data '{ \
          \"state\" : \"failure\", \
          \"target_url\": \"$TARGET_URL\", \
          \"description\": \"Your build failed\", \
          \"context\": \"ci/jenkins\" }' \
          -u $GH_AUTH_USERNAME:$GH_AUTH_PASSWORD"
      }
    }
  }

  // The options directive is for configuration that applies to the whole job.
  options {
    // we'd like to make sure remove old builds, so we don't fill up our storage!
    buildDiscarder(logRotator(numToKeepStr:'50'))

    // And we'd really like to be sure that this build doesn't hang forever, so let's time it out after:
    timeout(time: 30, unit: 'MINUTES')
  }
}
Makefile (6 lines changed)
@@ -25,13 +25,13 @@ clean:
	docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)

format:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format
	$(DOCKER_COMPOSE) run --rm test_unit npm run format

format_fix:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent format:fix
	$(DOCKER_COMPOSE) run --rm test_unit npm run format:fix

lint:
	$(DOCKER_COMPOSE) run --rm test_unit npm run --silent lint
	$(DOCKER_COMPOSE) run --rm test_unit npm run lint

test: format lint test_unit test_acceptance
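The Jenkinsfile and Makefile hunks above both drive linting and tests through docker-compose. As a minimal local sketch, assuming the docker-compose.ci.yml file referenced by these targets is present in the checkout, the same checks the CI pipeline runs can be invoked directly (these commands appear verbatim in the Jenkinsfile):

    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make format
    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make lint
    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_unit
    $ DOCKER_COMPOSE_FLAGS="-f docker-compose.ci.yml" make test_acceptance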
README.md (19 lines changed)
@@ -1,8 +1,6 @@
overleaf/clsi
===============

**Note:** Original repo here: https://github.com/overleaf/clsi

A web api for compiling LaTeX documents in the cloud

The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). The CLSI listens on the following ports by default:
@@ -36,21 +34,20 @@ The CLSI can be installed and set up as part of the entire [Overleaf stack](http
    $ git clone git@github.com:overleaf/clsi.git

Then install the require npm modules and run:
Then install the require npm modules:

    $ npm install
    $ npm start

Then compile the coffee script source files:

    $ grunt install

Finally, (after configuring your local database - see the Config section), run the CLSI service:

    $ grunt run

The CLSI should then be running at http://localhost:3013.

**Note:** to install texlive-full on ubuntu:
    $ sudo apt install texlive-full

Possible REST API clients to test are:
* Postman
* Insomnia

Config
------
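The README above suggests Postman or Insomnia for exercising the REST API; a plain curl call works too. This is only an illustrative sketch: the /project/<project_id>/compile path and the JSON payload shape are assumptions inferred from the CompileController and CompileManager code further down in this diff, and <project_id> is a placeholder rather than a real value.

    $ curl -X POST http://localhost:3013/project/<project_id>/compile \
        -H 'Content-Type: application/json' \
        -d '{"compile": {"resources": [{"path": "main.tex", "content": "\\documentclass{article}\\begin{document}Hello TeX\\end{document}"}]}}'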
app.js (19 lines changed)
@@ -134,16 +134,17 @@ const staticServer = ForbidSymlinks(express.static, Settings.path.compilesDir, {
  }
})

app.get(
  '/project/:project_id/user/:user_id/build/:build_id/output/*',
  function (req, res, next) {
app.get('/project/:project_id/user/:user_id/build/:build_id/output/*', function(
  req,
  res,
  next
) {
  // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId)
  req.url =
    `/${req.params.project_id}-${req.params.user_id}/` +
    OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`)
  return staticServer(req, res, next)
  }
)
})

app.get('/project/:project_id/build/:build_id/output/*', function(
  req,
@@ -207,7 +208,7 @@ if (Settings.processLifespanLimitMs) {
function runSmokeTest() {
  if (Settings.processTooOld) return
  logger.log('running smoke tests')
  smokeTest.triggerRun((err) => {
  smokeTest.triggerRun(err => {
    if (err) logger.error({ err }, 'smoke tests failed')
    setTimeout(runSmokeTest, 30 * 1000)
  })
@@ -300,12 +301,12 @@ loadHttpServer.post('/state/maint', function (req, res, next) {
const port =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    (x) => x.port
    x => x.port
  ) || 3013
const host =
  __guard__(
    Settings.internal != null ? Settings.internal.clsi : undefined,
    (x1) => x1.host
    x1 => x1.host
  ) || 'localhost'

const loadTcpPort = Settings.internal.load_balancer_agent.load_port
@@ -313,7 +314,7 @@ const loadHttpPort = Settings.internal.load_balancer_agent.local_port

if (!module.parent) {
  // Called directly
  app.listen(port, host, (error) => {
  app.listen(port, host, error => {
    if (error) {
      logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`)
    } else {
@@ -116,7 +116,7 @@ module.exports = CompileController = {
|
||||
compile: {
|
||||
status,
|
||||
error: (error != null ? error.message : undefined) || error,
|
||||
outputFiles: outputFiles.map((file) => ({
|
||||
outputFiles: outputFiles.map(file => ({
|
||||
url:
|
||||
`${Settings.apis.clsi.url}/project/${request.project_id}` +
|
||||
(request.user_id != null
|
||||
@@ -197,21 +197,17 @@ module.exports = CompileController = {
|
||||
const v = parseFloat(req.query.v)
|
||||
const { project_id } = req.params
|
||||
const { user_id } = req.params
|
||||
return CompileManager.syncFromPdf(
|
||||
project_id,
|
||||
user_id,
|
||||
page,
|
||||
h,
|
||||
v,
|
||||
function (error, codePositions) {
|
||||
return CompileManager.syncFromPdf(project_id, user_id, page, h, v, function(
|
||||
error,
|
||||
codePositions
|
||||
) {
|
||||
if (error != null) {
|
||||
return next(error)
|
||||
}
|
||||
return res.json({
|
||||
code: codePositions
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
wordcount(req, res, next) {
|
||||
@@ -222,15 +218,6 @@ module.exports = CompileController = {
|
||||
const { project_id } = req.params
|
||||
const { user_id } = req.params
|
||||
const { image } = req.query
|
||||
if (
|
||||
image &&
|
||||
Settings.clsi &&
|
||||
Settings.clsi.docker &&
|
||||
Settings.clsi.docker.allowedImages &&
|
||||
!Settings.clsi.docker.allowedImages.includes(image)
|
||||
) {
|
||||
return res.status(400).send('invalid image')
|
||||
}
|
||||
logger.log({ image, file, project_id }, 'word count request')
|
||||
|
||||
return CompileManager.wordcount(project_id, user_id, file, image, function(
|
||||
|
||||
@@ -61,7 +61,7 @@ module.exports = CompileManager = {
|
||||
}
|
||||
return LockManager.runWithLock(
|
||||
lockFile,
|
||||
(releaseLock) => CompileManager.doCompile(request, releaseLock),
|
||||
releaseLock => CompileManager.doCompile(request, releaseLock),
|
||||
callback
|
||||
)
|
||||
})
|
||||
@@ -120,7 +120,7 @@ module.exports = CompileManager = {
|
||||
}
|
||||
}
|
||||
|
||||
const createTikzFileIfRequired = (callback) =>
|
||||
const createTikzFileIfRequired = callback =>
|
||||
TikzManager.checkMainFile(
|
||||
compileDir,
|
||||
request.rootResourcePath,
|
||||
@@ -177,9 +177,9 @@ module.exports = CompileManager = {
|
||||
request.imageName != null
|
||||
? request.imageName.match(/:(.*)/)
|
||||
: undefined,
|
||||
(x1) => x1[1]
|
||||
x1 => x1[1]
|
||||
),
|
||||
(x) => x.replace(/\./g, '-')
|
||||
x => x.replace(/\./g, '-')
|
||||
) || 'default'
|
||||
if (!request.project_id.match(/^[0-9a-f]{24}$/)) {
|
||||
tag = 'other'
|
||||
@@ -206,7 +206,9 @@ module.exports = CompileManager = {
|
||||
// request was for validation only
|
||||
let metric_key, metric_value
|
||||
if (request.check === 'validate') {
|
||||
const result = (error != null ? error.code : undefined)
|
||||
const result = (error != null
|
||||
? error.code
|
||||
: undefined)
|
||||
? 'fail'
|
||||
: 'pass'
|
||||
error = new Error('validation')
|
||||
@@ -337,7 +339,7 @@ module.exports = CompileManager = {
|
||||
proc.on('error', callback)
|
||||
|
||||
let stderr = ''
|
||||
proc.stderr.setEncoding('utf8').on('data', (chunk) => (stderr += chunk))
|
||||
proc.stderr.setEncoding('utf8').on('data', chunk => (stderr += chunk))
|
||||
|
||||
return proc.on('close', function(code) {
|
||||
if (code === 0) {
|
||||
@@ -358,7 +360,7 @@ module.exports = CompileManager = {
|
||||
if (err != null) {
|
||||
return callback(err)
|
||||
}
|
||||
const allDirs = Array.from(files).map((file) => Path.join(root, file))
|
||||
const allDirs = Array.from(files).map(file => Path.join(root, file))
|
||||
return callback(null, allDirs)
|
||||
})
|
||||
},
|
||||
@@ -510,7 +512,7 @@ module.exports = CompileManager = {
|
||||
const timeout = 60 * 1000 // increased to allow for large projects
|
||||
const compileName = getCompileName(project_id, user_id)
|
||||
const compileGroup = 'synctex'
|
||||
CompileManager._checkFileExists(directory, 'output.synctex.gz', (error) => {
|
||||
CompileManager._checkFileExists(directory, 'output.synctex.gz', error => {
|
||||
if (error) {
|
||||
return callback(error)
|
||||
}
|
||||
@@ -518,9 +520,7 @@ module.exports = CompileManager = {
|
||||
compileName,
|
||||
command,
|
||||
directory,
|
||||
Settings.clsi && Settings.clsi.docker
|
||||
? Settings.clsi.docker.image
|
||||
: undefined,
|
||||
Settings.clsi != null ? Settings.clsi.docker.image : undefined,
|
||||
timeout,
|
||||
{},
|
||||
compileGroup,
|
||||
|
||||
@@ -1,3 +1,21 @@
|
||||
/* eslint-disable
|
||||
camelcase,
|
||||
handle-callback-err,
|
||||
no-return-assign,
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS103: Rewrite code to no longer use __guard__
|
||||
* DS205: Consider reworking code to avoid use of IIFEs
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
let DockerRunner, oneHour
|
||||
const Settings = require('settings-sharelatex')
|
||||
const logger = require('logger-sharelatex')
|
||||
const Docker = require('dockerode')
|
||||
@@ -9,23 +27,25 @@ const fs = require('fs')
|
||||
const Path = require('path')
|
||||
const _ = require('lodash')
|
||||
|
||||
const ONE_HOUR_IN_MS = 60 * 60 * 1000
|
||||
logger.info('using docker runner')
|
||||
|
||||
function usingSiblingContainers() {
|
||||
return (
|
||||
Settings != null &&
|
||||
Settings.path != null &&
|
||||
Settings.path.sandboxedCompilesHostDir != null
|
||||
)
|
||||
}
|
||||
const usingSiblingContainers = () =>
|
||||
__guard__(
|
||||
Settings != null ? Settings.path : undefined,
|
||||
x => x.sandboxedCompilesHostDir
|
||||
) != null
|
||||
|
||||
let containerMonitorTimeout
|
||||
let containerMonitorInterval
|
||||
|
||||
const DockerRunner = {
|
||||
module.exports = DockerRunner = {
|
||||
ERR_NOT_DIRECTORY: new Error('not a directory'),
|
||||
ERR_TERMINATED: new Error('terminated'),
|
||||
ERR_EXITED: new Error('exited'),
|
||||
ERR_TIMED_OUT: new Error('container timed out'),
|
||||
|
||||
run(
|
||||
projectId,
|
||||
project_id,
|
||||
command,
|
||||
directory,
|
||||
image,
|
||||
@@ -34,6 +54,10 @@ const DockerRunner = {
|
||||
compileGroup,
|
||||
callback
|
||||
) {
|
||||
let name
|
||||
if (callback == null) {
|
||||
callback = function(error, output) {}
|
||||
}
|
||||
if (usingSiblingContainers()) {
|
||||
const _newPath = Settings.path.sandboxedCompilesHostDir
|
||||
logger.log(
|
||||
@@ -50,20 +74,16 @@ const DockerRunner = {
|
||||
)
|
||||
}
|
||||
|
||||
const volumes = { [directory]: '/compile' }
|
||||
const volumes = {}
|
||||
volumes[directory] = '/compile'
|
||||
|
||||
command = command.map((arg) =>
|
||||
arg.toString().replace('$COMPILE_DIR', '/compile')
|
||||
command = Array.from(command).map(arg =>
|
||||
__guardMethod__(arg.toString(), 'replace', o =>
|
||||
o.replace('$COMPILE_DIR', '/compile')
|
||||
)
|
||||
)
|
||||
if (image == null) {
|
||||
image = Settings.clsi.docker.image
|
||||
}
|
||||
|
||||
if (
|
||||
Settings.clsi.docker.allowedImages &&
|
||||
!Settings.clsi.docker.allowedImages.includes(image)
|
||||
) {
|
||||
return callback(new Error('image not allowed'))
|
||||
;({ image } = Settings.clsi.docker)
|
||||
}
|
||||
|
||||
if (Settings.texliveImageNameOveride != null) {
|
||||
@@ -80,27 +100,25 @@ const DockerRunner = {
|
||||
compileGroup
|
||||
)
|
||||
const fingerprint = DockerRunner._fingerprintContainer(options)
|
||||
const name = `project-${projectId}-${fingerprint}`
|
||||
options.name = name
|
||||
options.name = name = `project-${project_id}-${fingerprint}`
|
||||
|
||||
// logOptions = _.clone(options)
|
||||
// logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging"
|
||||
logger.log({ projectId }, 'running docker container')
|
||||
DockerRunner._runAndWaitForContainer(
|
||||
options,
|
||||
volumes,
|
||||
timeout,
|
||||
(error, output) => {
|
||||
logger.log({ project_id }, 'running docker container')
|
||||
DockerRunner._runAndWaitForContainer(options, volumes, timeout, function(
|
||||
error,
|
||||
output
|
||||
) {
|
||||
if (error && error.statusCode === 500) {
|
||||
logger.log(
|
||||
{ err: error, projectId },
|
||||
{ err: error, project_id },
|
||||
'error running container so destroying and retrying'
|
||||
)
|
||||
DockerRunner.destroyContainer(name, null, true, (error) => {
|
||||
return DockerRunner.destroyContainer(name, null, true, function(error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
DockerRunner._runAndWaitForContainer(
|
||||
return DockerRunner._runAndWaitForContainer(
|
||||
options,
|
||||
volumes,
|
||||
timeout,
|
||||
@@ -108,93 +126,108 @@ const DockerRunner = {
|
||||
)
|
||||
})
|
||||
} else {
|
||||
callback(error, output)
|
||||
return callback(error, output)
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
// pass back the container name to allow it to be killed
|
||||
return name
|
||||
},
|
||||
}, // pass back the container name to allow it to be killed
|
||||
|
||||
kill(containerId, callback) {
|
||||
logger.log({ containerId }, 'sending kill signal to container')
|
||||
const container = dockerode.getContainer(containerId)
|
||||
container.kill((error) => {
|
||||
kill(container_id, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
logger.log({ container_id }, 'sending kill signal to container')
|
||||
const container = dockerode.getContainer(container_id)
|
||||
return container.kill(function(error) {
|
||||
if (
|
||||
error != null &&
|
||||
error.message != null &&
|
||||
error.message.match(/Cannot kill container .* is not running/)
|
||||
__guardMethod__(error != null ? error.message : undefined, 'match', o =>
|
||||
o.match(/Cannot kill container .* is not running/)
|
||||
)
|
||||
) {
|
||||
logger.warn(
|
||||
{ err: error, containerId },
|
||||
{ err: error, container_id },
|
||||
'container not running, continuing'
|
||||
)
|
||||
error = null
|
||||
}
|
||||
if (error != null) {
|
||||
logger.error({ err: error, containerId }, 'error killing container')
|
||||
callback(error)
|
||||
logger.error({ err: error, container_id }, 'error killing container')
|
||||
return callback(error)
|
||||
} else {
|
||||
callback()
|
||||
return callback()
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
_runAndWaitForContainer(options, volumes, timeout, _callback) {
|
||||
const callback = _.once(_callback)
|
||||
if (_callback == null) {
|
||||
_callback = function(error, output) {}
|
||||
}
|
||||
const callback = function(...args) {
|
||||
_callback(...Array.from(args || []))
|
||||
// Only call the callback once
|
||||
return (_callback = function() {})
|
||||
}
|
||||
|
||||
const { name } = options
|
||||
|
||||
let streamEnded = false
|
||||
let containerReturned = false
|
||||
let output = {}
|
||||
|
||||
function callbackIfFinished() {
|
||||
const callbackIfFinished = function() {
|
||||
if (streamEnded && containerReturned) {
|
||||
callback(null, output)
|
||||
return callback(null, output)
|
||||
}
|
||||
}
|
||||
|
||||
function attachStreamHandler(error, _output) {
|
||||
const attachStreamHandler = function(error, _output) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
output = _output
|
||||
streamEnded = true
|
||||
callbackIfFinished()
|
||||
return callbackIfFinished()
|
||||
}
|
||||
|
||||
DockerRunner.startContainer(
|
||||
return DockerRunner.startContainer(
|
||||
options,
|
||||
volumes,
|
||||
attachStreamHandler,
|
||||
(error, containerId) => {
|
||||
function(error, containerId) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
DockerRunner.waitForContainer(name, timeout, (error, exitCode) => {
|
||||
return DockerRunner.waitForContainer(name, timeout, function(
|
||||
error,
|
||||
exitCode
|
||||
) {
|
||||
let err
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (exitCode === 137) {
|
||||
// exit status from kill -9
|
||||
const err = new Error('terminated')
|
||||
err = DockerRunner.ERR_TERMINATED
|
||||
err.terminated = true
|
||||
return callback(err)
|
||||
}
|
||||
if (exitCode === 1) {
|
||||
// exit status from chktex
|
||||
const err = new Error('exited')
|
||||
err = DockerRunner.ERR_EXITED
|
||||
err.code = exitCode
|
||||
return callback(err)
|
||||
}
|
||||
containerReturned = true
|
||||
if (options != null && options.HostConfig != null) {
|
||||
options.HostConfig.SecurityOpt = null
|
||||
}
|
||||
logger.log({ exitCode, options }, 'docker container has exited')
|
||||
callbackIfFinished()
|
||||
__guard__(
|
||||
options != null ? options.HostConfig : undefined,
|
||||
x => (x.SecurityOpt = null)
|
||||
) // small log line
|
||||
logger.log({ err, exitCode, options }, 'docker container has exited')
|
||||
return callbackIfFinished()
|
||||
})
|
||||
}
|
||||
)
|
||||
@@ -208,11 +241,13 @@ const DockerRunner = {
|
||||
environment,
|
||||
compileGroup
|
||||
) {
|
||||
let m, year
|
||||
let key, value, hostVol, dockerVol
|
||||
const timeoutInSeconds = timeout / 1000
|
||||
|
||||
const dockerVolumes = {}
|
||||
for (const hostVol in volumes) {
|
||||
const dockerVol = volumes[hostVol]
|
||||
for (hostVol in volumes) {
|
||||
dockerVol = volumes[hostVol]
|
||||
dockerVolumes[dockerVol] = {}
|
||||
|
||||
if (volumes[hostVol].slice(-3).indexOf(':r') === -1) {
|
||||
@@ -223,14 +258,17 @@ const DockerRunner = {
|
||||
// merge settings and environment parameter
|
||||
const env = {}
|
||||
for (const src of [Settings.clsi.docker.env, environment || {}]) {
|
||||
for (const key in src) {
|
||||
const value = src[key]
|
||||
for (key in src) {
|
||||
value = src[key]
|
||||
env[key] = value
|
||||
}
|
||||
}
|
||||
// set the path based on the image year
|
||||
const match = image.match(/:([0-9]+)\.[0-9]+/)
|
||||
const year = match ? match[1] : '2014'
|
||||
if ((m = image.match(/:([0-9]+)\.[0-9]+/))) {
|
||||
year = m[1]
|
||||
} else {
|
||||
year = '2014'
|
||||
}
|
||||
env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
|
||||
const options = {
|
||||
Cmd: command,
|
||||
@@ -240,11 +278,23 @@ const DockerRunner = {
|
||||
NetworkDisabled: true,
|
||||
Memory: 1024 * 1024 * 1024 * 1024, // 1 Gb
|
||||
User: Settings.clsi.docker.user,
|
||||
Env: Object.entries(env).map(([key, value]) => `${key}=${value}`),
|
||||
Env: (() => {
|
||||
const result = []
|
||||
for (key in env) {
|
||||
value = env[key]
|
||||
result.push(`${key}=${value}`)
|
||||
}
|
||||
return result
|
||||
})(), // convert the environment hash to an array
|
||||
HostConfig: {
|
||||
Binds: Object.entries(volumes).map(
|
||||
([hostVol, dockerVol]) => `${hostVol}:${dockerVol}`
|
||||
),
|
||||
Binds: (() => {
|
||||
const result1 = []
|
||||
for (hostVol in volumes) {
|
||||
dockerVol = volumes[hostVol]
|
||||
result1.push(`${hostVol}:${dockerVol}`)
|
||||
}
|
||||
return result1
|
||||
})(),
|
||||
LogConfig: { Type: 'none', Config: {} },
|
||||
Ulimits: [
|
||||
{
|
||||
@@ -258,7 +308,10 @@ const DockerRunner = {
|
||||
}
|
||||
}
|
||||
|
||||
if (Settings.path != null && Settings.path.synctexBinHostPath != null) {
|
||||
if (
|
||||
(Settings.path != null ? Settings.path.synctexBinHostPath : undefined) !=
|
||||
null
|
||||
) {
|
||||
options.HostConfig.Binds.push(
|
||||
`${Settings.path.synctexBinHostPath}:/opt/synctex:ro`
|
||||
)
|
||||
@@ -277,7 +330,6 @@ const DockerRunner = {
|
||||
if (Settings.clsi.docker.Readonly) {
|
||||
options.HostConfig.ReadonlyRootfs = true
|
||||
options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
|
||||
options.Volumes['/home/tex'] = {}
|
||||
}
|
||||
|
||||
// Allow per-compile group overriding of individual settings
|
||||
@@ -286,7 +338,8 @@ const DockerRunner = {
|
||||
Settings.clsi.docker.compileGroupConfig[compileGroup]
|
||||
) {
|
||||
const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
|
||||
for (const key in override) {
|
||||
let key
|
||||
for (key in override) {
|
||||
_.set(options, key, override[key])
|
||||
}
|
||||
}
|
||||
@@ -297,22 +350,25 @@ const DockerRunner = {
|
||||
_fingerprintContainer(containerOptions) {
|
||||
// Yay, Hashing!
|
||||
const json = JSON.stringify(containerOptions)
|
||||
return crypto.createHash('md5').update(json).digest('hex')
|
||||
return crypto
|
||||
.createHash('md5')
|
||||
.update(json)
|
||||
.digest('hex')
|
||||
},
|
||||
|
||||
startContainer(options, volumes, attachStreamHandler, callback) {
|
||||
LockManager.runWithLock(
|
||||
return LockManager.runWithLock(
|
||||
options.name,
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
// Check that volumes exist before starting the container.
|
||||
// When a container is started with volume pointing to a
|
||||
// non-existent directory then docker creates the directory but
|
||||
// with root ownership.
|
||||
DockerRunner._checkVolumes(options, volumes, (err) => {
|
||||
DockerRunner._checkVolumes(options, volumes, function(err) {
|
||||
if (err != null) {
|
||||
return releaseLock(err)
|
||||
}
|
||||
DockerRunner._startContainer(
|
||||
return DockerRunner._startContainer(
|
||||
options,
|
||||
volumes,
|
||||
attachStreamHandler,
|
||||
@@ -326,85 +382,93 @@ const DockerRunner = {
|
||||
|
||||
// Check that volumes exist and are directories
|
||||
_checkVolumes(options, volumes, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error, containerName) {}
|
||||
}
|
||||
if (usingSiblingContainers()) {
|
||||
// Server Pro, with sibling-containers active, skip checks
|
||||
return callback(null)
|
||||
}
|
||||
|
||||
const checkVolume = (path, cb) =>
|
||||
fs.stat(path, (err, stats) => {
|
||||
fs.stat(path, function(err, stats) {
|
||||
if (err != null) {
|
||||
return cb(err)
|
||||
}
|
||||
if (!stats.isDirectory()) {
|
||||
return cb(new Error('not a directory'))
|
||||
if (!(stats != null ? stats.isDirectory() : undefined)) {
|
||||
return cb(DockerRunner.ERR_NOT_DIRECTORY)
|
||||
}
|
||||
cb()
|
||||
return cb()
|
||||
})
|
||||
const jobs = []
|
||||
for (const vol in volumes) {
|
||||
jobs.push((cb) => checkVolume(vol, cb))
|
||||
;(vol => jobs.push(cb => checkVolume(vol, cb)))(vol)
|
||||
}
|
||||
async.series(jobs, callback)
|
||||
return async.series(jobs, callback)
|
||||
},
|
||||
|
||||
_startContainer(options, volumes, attachStreamHandler, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error, output) {}
|
||||
}
|
||||
callback = _.once(callback)
|
||||
const { name } = options
|
||||
|
||||
logger.log({ container_name: name }, 'starting container')
|
||||
const container = dockerode.getContainer(name)
|
||||
|
||||
function createAndStartContainer() {
|
||||
dockerode.createContainer(options, (error, container) => {
|
||||
const createAndStartContainer = () =>
|
||||
dockerode.createContainer(options, function(error, container) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
startExistingContainer()
|
||||
return startExistingContainer()
|
||||
})
|
||||
}
|
||||
|
||||
function startExistingContainer() {
|
||||
var startExistingContainer = () =>
|
||||
DockerRunner.attachToContainer(
|
||||
options.name,
|
||||
attachStreamHandler,
|
||||
(error) => {
|
||||
function(error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
container.start((error) => {
|
||||
if (error != null && error.statusCode !== 304) {
|
||||
callback(error)
|
||||
} else {
|
||||
return container.start(function(error) {
|
||||
if (
|
||||
error != null &&
|
||||
(error != null ? error.statusCode : undefined) !== 304
|
||||
) {
|
||||
// already running
|
||||
callback()
|
||||
return callback(error)
|
||||
} else {
|
||||
return callback()
|
||||
}
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
container.inspect((error, stats) => {
|
||||
if (error != null && error.statusCode === 404) {
|
||||
createAndStartContainer()
|
||||
return container.inspect(function(error, stats) {
|
||||
if ((error != null ? error.statusCode : undefined) === 404) {
|
||||
return createAndStartContainer()
|
||||
} else if (error != null) {
|
||||
logger.err(
|
||||
{ container_name: name, error },
|
||||
'unable to inspect container to start'
|
||||
)
|
||||
callback(error)
|
||||
return callback(error)
|
||||
} else {
|
||||
startExistingContainer()
|
||||
return startExistingContainer()
|
||||
}
|
||||
})
|
||||
},
|
||||
|
||||
attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
|
||||
const container = dockerode.getContainer(containerId)
|
||||
container.attach({ stdout: 1, stderr: 1, stream: 1 }, (error, stream) => {
|
||||
return container.attach({ stdout: 1, stderr: 1, stream: 1 }, function(
|
||||
error,
|
||||
stream
|
||||
) {
|
||||
if (error != null) {
|
||||
logger.error(
|
||||
{ err: error, containerId },
|
||||
{ err: error, container_id: containerId },
|
||||
'error attaching to container'
|
||||
)
|
||||
return attachStartCallback(error)
|
||||
@@ -412,10 +476,10 @@ const DockerRunner = {
|
||||
attachStartCallback()
|
||||
}
|
||||
|
||||
logger.log({ containerId }, 'attached to container')
|
||||
logger.log({ container_id: containerId }, 'attached to container')
|
||||
|
||||
const MAX_OUTPUT = 1024 * 1024 // limit output to 1MB
|
||||
function createStringOutputStream(name) {
|
||||
const createStringOutputStream = function(name) {
|
||||
return {
|
||||
data: '',
|
||||
overflowed: false,
|
||||
@@ -424,18 +488,18 @@ const DockerRunner = {
|
||||
return
|
||||
}
|
||||
if (this.data.length < MAX_OUTPUT) {
|
||||
this.data += data
|
||||
return (this.data += data)
|
||||
} else {
|
||||
logger.error(
|
||||
{
|
||||
containerId,
|
||||
container_id: containerId,
|
||||
length: this.data.length,
|
||||
maxLen: MAX_OUTPUT
|
||||
},
|
||||
`${name} exceeds max size`
|
||||
)
|
||||
this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
|
||||
this.overflowed = true
|
||||
return (this.overflowed = true)
|
||||
}
|
||||
}
|
||||
// kill container if too much output
|
||||
@@ -448,52 +512,63 @@ const DockerRunner = {
|
||||
|
||||
container.modem.demuxStream(stream, stdout, stderr)
|
||||
|
||||
stream.on('error', (err) =>
|
||||
stream.on('error', err =>
|
||||
logger.error(
|
||||
{ err, containerId },
|
||||
{ err, container_id: containerId },
|
||||
'error reading from container stream'
|
||||
)
|
||||
)
|
||||
|
||||
stream.on('end', () =>
|
||||
return stream.on('end', () =>
|
||||
attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
|
||||
)
|
||||
})
|
||||
},
|
||||
|
||||
waitForContainer(containerId, timeout, _callback) {
|
||||
const callback = _.once(_callback)
|
||||
if (_callback == null) {
|
||||
_callback = function(error, exitCode) {}
|
||||
}
|
||||
const callback = function(...args) {
|
||||
_callback(...Array.from(args || []))
|
||||
// Only call the callback once
|
||||
return (_callback = function() {})
|
||||
}
|
||||
|
||||
const container = dockerode.getContainer(containerId)
|
||||
|
||||
let timedOut = false
|
||||
const timeoutId = setTimeout(() => {
|
||||
const timeoutId = setTimeout(function() {
|
||||
timedOut = true
|
||||
logger.log({ containerId }, 'timeout reached, killing container')
|
||||
container.kill((err) => {
|
||||
logger.warn({ err, containerId }, 'failed to kill container')
|
||||
})
|
||||
logger.log(
|
||||
{ container_id: containerId },
|
||||
'timeout reached, killing container'
|
||||
)
|
||||
return container.kill(function() {})
|
||||
}, timeout)
|
||||
|
||||
logger.log({ containerId }, 'waiting for docker container')
|
||||
container.wait((error, res) => {
|
||||
logger.log({ container_id: containerId }, 'waiting for docker container')
|
||||
return container.wait(function(error, res) {
|
||||
if (error != null) {
|
||||
clearTimeout(timeoutId)
|
||||
logger.error({ err: error, containerId }, 'error waiting for container')
|
||||
logger.error(
|
||||
{ err: error, container_id: containerId },
|
||||
'error waiting for container'
|
||||
)
|
||||
return callback(error)
|
||||
}
|
||||
if (timedOut) {
|
||||
logger.log({ containerId }, 'docker container timed out')
|
||||
error = new Error('container timed out')
|
||||
error = DockerRunner.ERR_TIMED_OUT
|
||||
error.timedout = true
|
||||
callback(error)
|
||||
return callback(error)
|
||||
} else {
|
||||
clearTimeout(timeoutId)
|
||||
logger.log(
|
||||
{ containerId, exitCode: res.StatusCode },
|
||||
{ container_id: containerId, exitCode: res.StatusCode },
|
||||
'docker container returned'
|
||||
)
|
||||
callback(null, res.StatusCode)
|
||||
return callback(null, res.StatusCode)
|
||||
}
|
||||
})
|
||||
},
|
||||
@@ -505,9 +580,12 @@ const DockerRunner = {
|
||||
// async exception, but if you delete by id it just does a normal
|
||||
// error callback. We fall back to deleting by name if no id is
|
||||
// supplied.
|
||||
LockManager.runWithLock(
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return LockManager.runWithLock(
|
||||
containerName,
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
DockerRunner._destroyContainer(
|
||||
containerId || containerName,
|
||||
shouldForce,
|
||||
@@ -518,31 +596,46 @@ const DockerRunner = {
|
||||
},
|
||||
|
||||
_destroyContainer(containerId, shouldForce, callback) {
|
||||
logger.log({ containerId }, 'destroying docker container')
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
logger.log({ container_id: containerId }, 'destroying docker container')
|
||||
const container = dockerode.getContainer(containerId)
|
||||
container.remove({ force: shouldForce === true, v: true }, (error) => {
|
||||
if (error != null && error.statusCode === 404) {
|
||||
return container.remove({ force: shouldForce === true }, function(error) {
|
||||
if (
|
||||
error != null &&
|
||||
(error != null ? error.statusCode : undefined) === 404
|
||||
) {
|
||||
logger.warn(
|
||||
{ err: error, containerId },
|
||||
{ err: error, container_id: containerId },
|
||||
'container not found, continuing'
|
||||
)
|
||||
error = null
|
||||
}
|
||||
if (error != null) {
|
||||
logger.error({ err: error, containerId }, 'error destroying container')
|
||||
logger.error(
|
||||
{ err: error, container_id: containerId },
|
||||
'error destroying container'
|
||||
)
|
||||
} else {
|
||||
logger.log({ containerId }, 'destroyed container')
|
||||
logger.log({ container_id: containerId }, 'destroyed container')
|
||||
}
|
||||
callback(error)
|
||||
return callback(error)
|
||||
})
|
||||
},
|
||||
|
||||
// handle expiry of docker containers
|
||||
|
||||
MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || ONE_HOUR_IN_MS,
|
||||
MAX_CONTAINER_AGE:
|
||||
Settings.clsi.docker.maxContainerAge || (oneHour = 60 * 60 * 1000),
|
||||
|
||||
examineOldContainer(container, callback) {
|
||||
const name = container.Name || (container.Names && container.Names[0])
|
||||
if (callback == null) {
|
||||
callback = function(error, name, id, ttl) {}
|
||||
}
|
||||
const name =
|
||||
container.Name ||
|
||||
(container.Names != null ? container.Names[0] : undefined)
|
||||
const created = container.Created * 1000 // creation time is returned in seconds
|
||||
const now = Date.now()
|
||||
const age = now - created
|
||||
@@ -552,29 +645,39 @@ const DockerRunner = {
|
||||
{ containerName: name, created, now, age, maxAge, ttl },
|
||||
'checking whether to destroy container'
|
||||
)
|
||||
return { name, id: container.Id, ttl }
|
||||
return callback(null, name, container.Id, ttl)
|
||||
},
|
||||
|
||||
destroyOldContainers(callback) {
|
||||
dockerode.listContainers({ all: true }, (error, containers) => {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return dockerode.listContainers({ all: true }, function(error, containers) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const jobs = []
|
||||
for (const container of containers) {
|
||||
const { name, id, ttl } = DockerRunner.examineOldContainer(container)
|
||||
for (const container of Array.from(containers || [])) {
|
||||
;(container =>
|
||||
DockerRunner.examineOldContainer(container, function(
|
||||
err,
|
||||
name,
|
||||
id,
|
||||
ttl
|
||||
) {
|
||||
if (name.slice(0, 9) === '/project-' && ttl <= 0) {
|
||||
// strip the / prefix
|
||||
// the LockManager uses the plain container name
|
||||
const plainName = name.slice(1)
|
||||
jobs.push((cb) =>
|
||||
DockerRunner.destroyContainer(plainName, id, false, () => cb())
|
||||
name = name.slice(1)
|
||||
return jobs.push(cb =>
|
||||
DockerRunner.destroyContainer(name, id, false, () => cb())
|
||||
)
|
||||
}
|
||||
}))(container)
|
||||
}
|
||||
// Ignore errors because some containers get stuck but
|
||||
// will be destroyed next time
|
||||
async.series(jobs, callback)
|
||||
return async.series(jobs, callback)
|
||||
})
|
||||
},
|
||||
|
||||
@@ -591,13 +694,8 @@ const DockerRunner = {
|
||||
const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000)
|
||||
containerMonitorTimeout = setTimeout(() => {
|
||||
containerMonitorInterval = setInterval(
|
||||
() =>
|
||||
DockerRunner.destroyOldContainers((err) => {
|
||||
if (err) {
|
||||
logger.error({ err }, 'failed to destroy old containers')
|
||||
}
|
||||
}),
|
||||
ONE_HOUR_IN_MS
|
||||
() => DockerRunner.destroyOldContainers(),
|
||||
(oneHour = 60 * 60 * 1000)
|
||||
)
|
||||
}, randomDelay)
|
||||
},
|
||||
@@ -608,12 +706,27 @@ const DockerRunner = {
|
||||
containerMonitorTimeout = undefined
|
||||
}
|
||||
if (containerMonitorInterval) {
|
||||
clearInterval(containerMonitorInterval)
|
||||
containerMonitorInterval = undefined
|
||||
clearInterval(containerMonitorTimeout)
|
||||
containerMonitorTimeout = undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
DockerRunner.startContainerMonitor()
|
||||
|
||||
module.exports = DockerRunner
|
||||
function __guard__(value, transform) {
|
||||
return typeof value !== 'undefined' && value !== null
|
||||
? transform(value)
|
||||
: undefined
|
||||
}
|
||||
function __guardMethod__(obj, methodName, transform) {
|
||||
if (
|
||||
typeof obj !== 'undefined' &&
|
||||
obj !== null &&
|
||||
typeof obj[methodName] === 'function'
|
||||
) {
|
||||
return transform(obj, methodName)
|
||||
} else {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
@@ -96,13 +96,13 @@ module.exports = LatexRunner = {
|
||||
}
|
||||
const runs =
|
||||
__guard__(
|
||||
__guard__(output != null ? output.stderr : undefined, (x1) =>
|
||||
__guard__(output != null ? output.stderr : undefined, x1 =>
|
||||
x1.match(/^Run number \d+ of .*latex/gm)
|
||||
),
|
||||
(x) => x.length
|
||||
x => x.length
|
||||
) || 0
|
||||
const failed =
|
||||
__guard__(output != null ? output.stdout : undefined, (x2) =>
|
||||
__guard__(output != null ? output.stdout : undefined, x2 =>
|
||||
x2.match(/^Latexmk: Errors/m)
|
||||
) != null
|
||||
? 1
|
||||
@@ -122,21 +122,21 @@ module.exports = LatexRunner = {
|
||||
stderr != null
|
||||
? stderr.match(/Percent of CPU this job got: (\d+)/m)
|
||||
: undefined,
|
||||
(x3) => x3[1]
|
||||
x3 => x3[1]
|
||||
) || 0
|
||||
timings['cpu-time'] =
|
||||
__guard__(
|
||||
stderr != null
|
||||
? stderr.match(/User time.*: (\d+.\d+)/m)
|
||||
: undefined,
|
||||
(x4) => x4[1]
|
||||
x4 => x4[1]
|
||||
) || 0
|
||||
timings['sys-time'] =
|
||||
__guard__(
|
||||
stderr != null
|
||||
? stderr.match(/System time.*: (\d+.\d+)/m)
|
||||
: undefined,
|
||||
(x5) => x5[1]
|
||||
x5 => x5[1]
|
||||
) || 0
|
||||
// record output files
|
||||
LatexRunner.writeLogOutput(project_id, directory, output, () => {
|
||||
@@ -153,7 +153,7 @@ module.exports = LatexRunner = {
|
||||
// internal method for writing non-empty log files
|
||||
function _writeFile(file, content, cb) {
|
||||
if (content && content.length > 0) {
|
||||
fs.writeFile(file, content, (err) => {
|
||||
fs.writeFile(file, content, err => {
|
||||
if (err) {
|
||||
logger.error({ project_id, file }, 'error writing log file') // don't fail on error
|
||||
}
|
||||
@@ -202,7 +202,7 @@ module.exports = LatexRunner = {
|
||||
return (
|
||||
__guard__(
|
||||
Settings != null ? Settings.clsi : undefined,
|
||||
(x) => x.latexmkCommandPrefix
|
||||
x => x.latexmkCommandPrefix
|
||||
) || []
|
||||
).concat(args)
|
||||
},
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
*/
|
||||
let CommandRunner
|
||||
const { spawn } = require('child_process')
|
||||
const _ = require('underscore')
|
||||
const logger = require('logger-sharelatex')
|
||||
|
||||
logger.info('using standard command runner')
|
||||
@@ -34,10 +33,8 @@ module.exports = CommandRunner = {
|
||||
let key, value
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
} else {
|
||||
callback = _.once(callback)
|
||||
}
|
||||
command = Array.from(command).map((arg) =>
|
||||
command = Array.from(command).map(arg =>
|
||||
arg.toString().replace('$COMPILE_DIR', directory)
|
||||
)
|
||||
logger.log({ project_id, command, directory }, 'running command')
|
||||
@@ -58,7 +55,7 @@ module.exports = CommandRunner = {
|
||||
const proc = spawn(command[0], command.slice(1), { cwd: directory, env })
|
||||
|
||||
let stdout = ''
|
||||
proc.stdout.setEncoding('utf8').on('data', (data) => (stdout += data))
|
||||
proc.stdout.setEncoding('utf8').on('data', data => (stdout += data))
|
||||
|
||||
proc.on('error', function(err) {
|
||||
logger.err(
|
||||
|
||||
@@ -99,16 +99,13 @@ module.exports = OutputCacheManager = {
|
||||
(Settings.clsi != null ? Settings.clsi.archive_logs : undefined) ||
|
||||
(Settings.clsi != null ? Settings.clsi.strace : undefined)
|
||||
) {
|
||||
OutputCacheManager.archiveLogs(
|
||||
outputFiles,
|
||||
compileDir,
|
||||
buildId,
|
||||
function (err) {
|
||||
OutputCacheManager.archiveLogs(outputFiles, compileDir, buildId, function(
|
||||
err
|
||||
) {
|
||||
if (err != null) {
|
||||
return logger.warn({ err }, 'erroring archiving log files')
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// make the new cache directory
|
||||
@@ -283,7 +280,7 @@ module.exports = OutputCacheManager = {
|
||||
// we can get the build time from the first part of the directory name DDDD-RRRR
|
||||
// DDDD is date and RRRR is random bytes
|
||||
const dirTime = parseInt(
|
||||
__guard__(dir.split('-'), (x) => x[0]),
|
||||
__guard__(dir.split('-'), x => x[0]),
|
||||
16
|
||||
)
|
||||
const age = currentTime - dirTime
|
||||
|
||||
@@ -44,7 +44,7 @@ module.exports = OutputFileFinder = {
|
||||
if (!incomingResources[file]) {
|
||||
outputFiles.push({
|
||||
path: file,
|
||||
type: __guard__(file.match(/\.([^\.]+)$/), (x) => x[1])
|
||||
type: __guard__(file.match(/\.([^\.]+)$/), x => x[1])
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -87,7 +87,7 @@ module.exports = OutputFileFinder = {
|
||||
|
||||
const proc = spawn('find', args)
|
||||
let stdout = ''
|
||||
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
|
||||
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
|
||||
proc.on('error', callback)
|
||||
return proc.on('close', function(code) {
|
||||
if (code !== 0) {
|
||||
|
||||
@@ -77,7 +77,7 @@ module.exports = OutputFileOptimiser = {
|
||||
const timer = new Metrics.Timer('qpdf')
|
||||
const proc = spawn('qpdf', args)
|
||||
let stdout = ''
|
||||
proc.stdout.setEncoding('utf8').on('data', (chunk) => (stdout += chunk))
|
||||
proc.stdout.setEncoding('utf8').on('data', chunk => (stdout += chunk))
|
||||
callback = _.once(callback) // avoid double call back for error and close event
|
||||
proc.on('error', function(err) {
|
||||
logger.warn({ err, args }, 'qpdf failed')
|
||||
|
||||
@@ -50,7 +50,7 @@ module.exports = ProjectPersistenceManager = {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.Project.findOrCreate({ where: { project_id } })
|
||||
.spread((project, created) =>
|
||||
project
|
||||
@@ -74,8 +74,8 @@ module.exports = ProjectPersistenceManager = {
|
||||
return callback(error)
|
||||
}
|
||||
logger.log({ project_ids }, 'clearing expired projects')
|
||||
const jobs = Array.from(project_ids || []).map((project_id) =>
|
||||
((project_id) => (callback) =>
|
||||
const jobs = Array.from(project_ids || []).map(project_id =>
|
||||
(project_id => callback =>
|
||||
ProjectPersistenceManager.clearProjectFromCache(project_id, function(
|
||||
err
|
||||
) {
|
||||
@@ -91,7 +91,7 @@ module.exports = ProjectPersistenceManager = {
|
||||
}
|
||||
return CompileManager.clearExpiredProjects(
|
||||
ProjectPersistenceManager.EXPIRY_TIMEOUT,
|
||||
(error) => callback()
|
||||
error => callback()
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -148,7 +148,7 @@ module.exports = ProjectPersistenceManager = {
|
||||
callback = function(error) {}
|
||||
}
|
||||
logger.log({ project_id }, 'clearing project from database')
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.Project.destroy({ where: { project_id } })
|
||||
.then(() => cb())
|
||||
.error(cb)
|
||||
@@ -166,10 +166,10 @@ module.exports = ProjectPersistenceManager = {
|
||||
const q = {}
|
||||
q[db.op.lt] = keepProjectsFrom
|
||||
return db.Project.findAll({ where: { lastAccessed: q } })
|
||||
.then((projects) =>
|
||||
.then(projects =>
|
||||
cb(
|
||||
null,
|
||||
projects.map((project) => project.project_id)
|
||||
projects.map(project => project.project_id)
|
||||
)
|
||||
)
|
||||
.error(cb)
|
||||
|
||||
@@ -61,13 +61,7 @@ module.exports = RequestParser = {
|
||||
response.imageName = this._parseAttribute(
|
||||
'imageName',
|
||||
compile.options.imageName,
|
||||
{
|
||||
type: 'string',
|
||||
validValues:
|
||||
settings.clsi &&
|
||||
settings.clsi.docker &&
|
||||
settings.clsi.docker.allowedImages
|
||||
}
|
||||
{ type: 'string' }
|
||||
)
|
||||
response.draft = this._parseAttribute('draft', compile.options.draft, {
|
||||
default: false,
|
||||
|
||||
@@ -56,9 +56,7 @@ module.exports = ResourceStateManager = {
|
||||
})
|
||||
} else {
|
||||
logger.log({ state, basePath }, 'writing sync state')
|
||||
const resourceList = Array.from(resources).map(
|
||||
(resource) => resource.path
|
||||
)
|
||||
const resourceList = Array.from(resources).map(resource => resource.path)
|
||||
return fs.writeFile(
|
||||
stateFile,
|
||||
[...Array.from(resourceList), `stateHash:${state}`].join('\n'),
|
||||
@@ -88,7 +86,7 @@ module.exports = ResourceStateManager = {
|
||||
)
|
||||
}
|
||||
const array =
|
||||
__guard__(result != null ? result.toString() : undefined, (x) =>
|
||||
__guard__(result != null ? result.toString() : undefined, x =>
|
||||
x.split('\n')
|
||||
) || []
|
||||
const adjustedLength = Math.max(array.length, 1)
|
||||
@@ -104,7 +102,7 @@ module.exports = ResourceStateManager = {
|
||||
new Errors.FilesOutOfSyncError('invalid state for incremental update')
|
||||
)
|
||||
} else {
|
||||
const resources = Array.from(resourceList).map((path) => ({ path }))
|
||||
const resources = Array.from(resourceList).map(path => ({ path }))
|
||||
return callback(null, resources)
|
||||
}
|
||||
})
|
||||
@@ -118,7 +116,7 @@ module.exports = ResourceStateManager = {
|
||||
}
|
||||
for (file of Array.from(resources || [])) {
|
||||
for (const dir of Array.from(
|
||||
__guard__(file != null ? file.path : undefined, (x) => x.split('/'))
|
||||
__guard__(file != null ? file.path : undefined, x => x.split('/'))
|
||||
)) {
|
||||
if (dir === '..') {
|
||||
return callback(new Error('relative path in resource file list'))
|
||||
@@ -131,8 +129,8 @@ module.exports = ResourceStateManager = {
|
||||
seenFile[file] = true
|
||||
}
|
||||
const missingFiles = Array.from(resources)
|
||||
.filter((resource) => !seenFile[resource.path])
|
||||
.map((resource) => resource.path)
|
||||
.filter(resource => !seenFile[resource.path])
|
||||
.map(resource => resource.path)
|
||||
if ((missingFiles != null ? missingFiles.length : undefined) > 0) {
|
||||
logger.err(
|
||||
{ missingFiles, basePath, allFiles, resources },
|
||||
|
||||
@@ -109,13 +109,13 @@ module.exports = ResourceWriter = {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return this._createDirectory(basePath, (error) => {
|
||||
return this._createDirectory(basePath, error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const jobs = Array.from(resources).map((resource) =>
|
||||
((resource) => {
|
||||
return (callback) =>
|
||||
const jobs = Array.from(resources).map(resource =>
|
||||
(resource => {
|
||||
return callback =>
|
||||
this._writeResourceToDisk(project_id, resource, basePath, callback)
|
||||
})(resource)
|
||||
)
|
||||
@@ -127,17 +127,17 @@ module.exports = ResourceWriter = {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
return this._createDirectory(basePath, (error) => {
|
||||
return this._createDirectory(basePath, error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return this._removeExtraneousFiles(resources, basePath, (error) => {
|
||||
return this._removeExtraneousFiles(resources, basePath, error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const jobs = Array.from(resources).map((resource) =>
|
||||
((resource) => {
|
||||
return (callback) =>
|
||||
const jobs = Array.from(resources).map(resource =>
|
||||
(resource => {
|
||||
return callback =>
|
||||
this._writeResourceToDisk(
|
||||
project_id,
|
||||
resource,
|
||||
@@ -242,7 +242,7 @@ module.exports = ResourceWriter = {
|
||||
should_delete = true
|
||||
}
|
||||
if (should_delete) {
|
||||
return jobs.push((callback) =>
|
||||
return jobs.push(callback =>
|
||||
ResourceWriter._deleteFileIfNotDirectory(
|
||||
Path.join(basePath, path),
|
||||
callback
|
||||
@@ -303,9 +303,7 @@ module.exports = ResourceWriter = {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return fs.mkdir(Path.dirname(path), { recursive: true }, function (
|
||||
error
|
||||
) {
|
||||
return fs.mkdir(Path.dirname(path), { recursive: true }, function(error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) {
|
||||
const basePath = Path.resolve(root)
|
||||
return function(req, res, next) {
|
||||
let file, project_id, result
|
||||
const path = __guard__(url.parse(req.url), (x) => x.pathname)
|
||||
const path = __guard__(url.parse(req.url), x => x.pathname)
|
||||
// check that the path is of the form /project_id_or_name/path/to/file.log
|
||||
if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) {
|
||||
project_id = result[1]
|
||||
|
||||
@@ -42,10 +42,7 @@ module.exports = TikzManager = {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return SafeReader.readFile(path, 65536, 'utf8', function (
|
||||
error,
|
||||
content
|
||||
) {
|
||||
return SafeReader.readFile(path, 65536, 'utf8', function(error, content) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
|
||||
@@ -60,8 +60,8 @@ module.exports = UrlCache = {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
const jobs = Array.from(urls || []).map((url) =>
|
||||
((url) => (callback) =>
|
||||
const jobs = Array.from(urls || []).map(url =>
|
||||
(url => callback =>
|
||||
UrlCache._clearUrlFromCache(project_id, url, function(error) {
|
||||
if (error != null) {
|
||||
logger.error(
|
||||
@@ -98,7 +98,7 @@ module.exports = UrlCache = {
|
||||
return UrlFetcher.pipeUrlToFileWithRetry(
|
||||
url,
|
||||
UrlCache._cacheFilePathForUrl(project_id, url),
|
||||
(error) => {
|
||||
error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
@@ -106,7 +106,7 @@ module.exports = UrlCache = {
|
||||
project_id,
|
||||
url,
|
||||
lastModified,
|
||||
(error) => {
|
||||
error => {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
@@ -153,7 +153,14 @@ module.exports = UrlCache = {
|
||||
},
|
||||
|
||||
_cacheFileNameForUrl(project_id, url) {
|
||||
return project_id + ':' + crypto.createHash('md5').update(url).digest('hex')
|
||||
return (
|
||||
project_id +
|
||||
':' +
|
||||
crypto
|
||||
.createHash('md5')
|
||||
.update(url)
|
||||
.digest('hex')
|
||||
)
|
||||
},
|
||||
|
||||
_cacheFilePathForUrl(project_id, url) {
|
||||
@@ -190,9 +197,7 @@ module.exports = UrlCache = {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function (
|
||||
error
|
||||
) {
|
||||
return UrlCache._deleteUrlCacheFromDisk(project_id, url, function(error) {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
@@ -221,9 +226,9 @@ module.exports = UrlCache = {
|
||||
if (callback == null) {
|
||||
callback = function(error, urlDetails) {}
|
||||
}
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.UrlCache.findOne({ where: { url, project_id } })
|
||||
.then((urlDetails) => cb(null, urlDetails))
|
||||
.then(urlDetails => cb(null, urlDetails))
|
||||
.error(cb)
|
||||
return dbQueue.queue.push(job, callback)
|
||||
},
|
||||
@@ -232,7 +237,7 @@ module.exports = UrlCache = {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.UrlCache.findOrCreate({ where: { url, project_id } })
|
||||
.spread((urlDetails, created) =>
|
||||
urlDetails
|
||||
@@ -248,7 +253,7 @@ module.exports = UrlCache = {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.UrlCache.destroy({ where: { url, project_id } })
|
||||
.then(() => cb(null))
|
||||
.error(cb)
|
||||
@@ -259,12 +264,12 @@ module.exports = UrlCache = {
|
||||
if (callback == null) {
|
||||
callback = function(error, urls) {}
|
||||
}
|
||||
const job = (cb) =>
|
||||
const job = cb =>
|
||||
db.UrlCache.findAll({ where: { project_id } })
|
||||
.then((urlEntries) =>
|
||||
.then(urlEntries =>
|
||||
cb(
|
||||
null,
|
||||
urlEntries.map((entry) => entry.url)
|
||||
urlEntries.map(entry => entry.url)
|
||||
)
|
||||
)
|
||||
.error(cb)
|
||||
|
||||
@@ -62,6 +62,6 @@ module.exports = {
|
||||
return sequelize
|
||||
.sync()
|
||||
.then(() => logger.log('db sync complete'))
|
||||
.catch((err) => console.log(err, 'error syncing'))
|
||||
.catch(err => console.log(err, 'error syncing'))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
clsi
|
||||
--acceptance-creds=None
|
||||
--data-dirs=cache,compiles,db
|
||||
--dependencies=
|
||||
--docker-repos=gcr.io/overleaf-ops
|
||||
--env-add=
|
||||
--env-pass-through=TEXLIVE_IMAGE
|
||||
--node-version=10.22.1
|
||||
--language=es
|
||||
--node-version=10.21.0
|
||||
--public-repo=True
|
||||
--script-version=3.3.3
|
||||
--script-version=2.1.0
|
||||
|
||||
@@ -129,17 +129,6 @@ if (process.env.DOCKER_RUNNER) {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (process.env.ALLOWED_IMAGES) {
|
||||
try {
|
||||
module.exports.clsi.docker.allowedImages = process.env.ALLOWED_IMAGES.split(
|
||||
' '
|
||||
)
|
||||
} catch (error) {
|
||||
console.error(error, 'could not apply allowed images setting')
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.path.synctexBaseDir = () => '/compile'
|
||||
|
||||
module.exports.path.sandboxedCompilesHostDir = process.env.COMPILES_HOST_DIR
|
||||
|
||||
@@ -3,7 +3,6 @@ version: "2.3"
|
||||
services:
|
||||
dev:
|
||||
environment:
|
||||
ALLOWED_IMAGES: "quay.io/sharelatex/texlive-full:2017.1"
|
||||
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
|
||||
TEXLIVE_IMAGE_USER: "tex"
|
||||
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
|
||||
@@ -19,7 +18,6 @@ services:
|
||||
|
||||
ci:
|
||||
environment:
|
||||
ALLOWED_IMAGES: ${TEXLIVE_IMAGE}
|
||||
TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
|
||||
TEXLIVE_IMAGE_USER: "tex"
|
||||
SHARELATEX_CONFIG: /app/config/settings.defaults.coffee
|
||||
|
||||
@@ -10,7 +10,6 @@ services:
|
||||
command: npm run test:unit:_run
|
||||
environment:
|
||||
NODE_ENV: test
|
||||
NODE_OPTIONS: "--unhandled-rejections=strict"
|
||||
|
||||
|
||||
test_acceptance:
|
||||
@@ -26,7 +25,6 @@ services:
|
||||
POSTGRES_HOST: postgres
|
||||
MOCHA_GREP: ${MOCHA_GREP}
|
||||
NODE_ENV: test
|
||||
NODE_OPTIONS: "--unhandled-rejections=strict"
|
||||
TEXLIVE_IMAGE:
|
||||
command: npm run test:acceptance:_run
|
||||
|
||||
|
||||
@@ -15,8 +15,7 @@ services:
|
||||
environment:
|
||||
MOCHA_GREP: ${MOCHA_GREP}
|
||||
NODE_ENV: test
|
||||
NODE_OPTIONS: "--unhandled-rejections=strict"
|
||||
command: npm run --silent test:unit
|
||||
command: npm run test:unit
|
||||
|
||||
test_acceptance:
|
||||
build:
|
||||
@@ -36,6 +35,5 @@ services:
|
||||
MOCHA_GREP: ${MOCHA_GREP}
|
||||
LOG_LEVEL: ERROR
|
||||
NODE_ENV: test
|
||||
NODE_OPTIONS: "--unhandled-rejections=strict"
|
||||
command: npm run --silent test:acceptance
|
||||
command: npm run test:acceptance
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"execMap": {
|
||||
"js": "npm run start"
|
||||
},
|
||||
|
||||
"watch": [
|
||||
"app/js/",
|
||||
"app.js",
|
||||
|
||||
399
package-lock.json
generated
399
package-lock.json
generated
@@ -176,43 +176,6 @@
|
||||
"google-auth-library": "^5.5.0",
|
||||
"retry-request": "^4.0.0",
|
||||
"teeny-request": "^6.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"google-auth-library": {
|
||||
"version": "5.10.1",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
|
||||
"integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
|
||||
"requires": {
|
||||
"arrify": "^2.0.0",
|
||||
"base64-js": "^1.3.0",
|
||||
"ecdsa-sig-formatter": "^1.0.11",
|
||||
"fast-text-encoding": "^1.0.0",
|
||||
"gaxios": "^2.1.0",
|
||||
"gcp-metadata": "^3.4.0",
|
||||
"gtoken": "^4.1.0",
|
||||
"jws": "^4.0.0",
|
||||
"lru-cache": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"jwa": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
|
||||
"integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==",
|
||||
"requires": {
|
||||
"buffer-equal-constant-time": "1.0.1",
|
||||
"ecdsa-sig-formatter": "1.0.11",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"requires": {
|
||||
"jwa": "^2.0.0",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@google-cloud/debug-agent": {
|
||||
@@ -408,41 +371,6 @@
|
||||
"type-fest": "^0.12.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"google-auth-library": {
|
||||
"version": "5.10.1",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
|
||||
"integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
|
||||
"requires": {
|
||||
"arrify": "^2.0.0",
|
||||
"base64-js": "^1.3.0",
|
||||
"ecdsa-sig-formatter": "^1.0.11",
|
||||
"fast-text-encoding": "^1.0.0",
|
||||
"gaxios": "^2.1.0",
|
||||
"gcp-metadata": "^3.4.0",
|
||||
"gtoken": "^4.1.0",
|
||||
"jws": "^4.0.0",
|
||||
"lru-cache": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"jwa": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
|
||||
"integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==",
|
||||
"requires": {
|
||||
"buffer-equal-constant-time": "1.0.1",
|
||||
"ecdsa-sig-formatter": "1.0.11",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"requires": {
|
||||
"jwa": "^2.0.0",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"type-fest": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz",
|
||||
@@ -451,12 +379,12 @@
|
||||
}
|
||||
},
|
||||
"@google-cloud/logging-bunyan": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-3.0.0.tgz",
|
||||
"integrity": "sha512-ZLVXEejNQ27ktGcA3S/sd7GPefp7kywbn+/KoBajdb1Syqcmtc98jhXpYQBXVtNP2065iyu77s4SBaiYFbTC5A==",
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-2.0.3.tgz",
|
||||
"integrity": "sha512-8n9MwsCRd4v8WZg17+d3m7qInud7lYTm5rpwXHY0/lzWEJYjeiztT09BiCYh56EEhHr+ynymJnzUDZKazkywlg==",
|
||||
"requires": {
|
||||
"@google-cloud/logging": "^7.0.0",
|
||||
"google-auth-library": "^6.0.0"
|
||||
"google-auth-library": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"@google-cloud/paginator": {
|
||||
@@ -822,9 +750,9 @@
|
||||
}
|
||||
},
|
||||
"@grpc/proto-loader": {
|
||||
"version": "0.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.5.tgz",
|
||||
"integrity": "sha512-WwN9jVNdHRQoOBo9FDH7qU+mgfjPc8GygPYms3M+y3fbQLfnCe/Kv/E01t7JRgnrsOHH8euvSbed3mIalXhwqQ==",
|
||||
"version": "0.5.4",
|
||||
"resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.5.4.tgz",
|
||||
"integrity": "sha512-HTM4QpI9B2XFkPz7pjwMyMgZchJ93TVkL3kWPW8GDMDKYxsMnmf4w2TNMJK7+KNiYHS5cJrCEAFlF+AwtXWVPA==",
|
||||
"requires": {
|
||||
"lodash.camelcase": "^4.3.0",
|
||||
"protobufjs": "^6.8.6"
|
||||
@@ -1308,7 +1236,7 @@
|
||||
"asn1": {
|
||||
"version": "0.2.4",
|
||||
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
|
||||
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
|
||||
"integrity": "sha1-jSR136tVO7M+d7VOWeiAu4ziMTY=",
|
||||
"requires": {
|
||||
"safer-buffer": "~2.1.0"
|
||||
}
|
||||
@@ -1437,20 +1365,13 @@
|
||||
"integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ="
|
||||
},
|
||||
"bl": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/bl/-/bl-4.0.3.tgz",
|
||||
"integrity": "sha512-fs4G6/Hu4/EE+F75J8DuN/0IpQqNjAdC7aEQv7Qt8MHGUH7Ckv2MwTEEeN9QehD0pfIDkMI1bkHYkKy7xHyKIg==",
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/bl/-/bl-4.0.1.tgz",
|
||||
"integrity": "sha512-FL/TdvchukRCuWVxT0YMO/7+L5TNeNrVFvRU2IY63aUyv9mpt8splf2NEr6qXtPo5fya5a66YohQKvGNmLrWNA==",
|
||||
"requires": {
|
||||
"buffer": "^5.5.0",
|
||||
"inherits": "^2.0.4",
|
||||
"readable-stream": "^3.4.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"inherits": {
|
||||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
|
||||
},
|
||||
"readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
@@ -1525,15 +1446,6 @@
|
||||
"integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
|
||||
"dev": true
|
||||
},
|
||||
"buffer": {
|
||||
"version": "5.6.0",
|
||||
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz",
|
||||
"integrity": "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==",
|
||||
"requires": {
|
||||
"base64-js": "^1.0.2",
|
||||
"ieee754": "^1.1.4"
|
||||
}
|
||||
},
|
||||
"buffer-equal-constant-time": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
||||
@@ -1542,7 +1454,7 @@
|
||||
"buffer-from": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
|
||||
"integrity": "sha1-MnE7wCj3XAL9txDXx7zsHyxgcO8="
|
||||
},
|
||||
"builtin-modules": {
|
||||
"version": "3.1.0",
|
||||
@@ -1553,7 +1465,6 @@
|
||||
"version": "1.8.12",
|
||||
"resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.12.tgz",
|
||||
"integrity": "sha1-8VDw9nSKvdcq6uhPBEA74u8RN5c=",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"dtrace-provider": "~0.8",
|
||||
"moment": "^2.10.6",
|
||||
@@ -1642,11 +1553,6 @@
|
||||
"integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==",
|
||||
"dev": true
|
||||
},
|
||||
"charenc": {
|
||||
"version": "0.0.2",
|
||||
"resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz",
|
||||
"integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc="
|
||||
},
|
||||
"check-error": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
|
||||
@@ -1843,7 +1749,7 @@
|
||||
"content-type": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
|
||||
"integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA=="
|
||||
"integrity": "sha1-4TjMdeBAxyexlm/l5fjJruJW/js="
|
||||
},
|
||||
"continuation-local-storage": {
|
||||
"version": "3.2.1",
|
||||
@@ -1894,11 +1800,6 @@
|
||||
"which": "^1.2.9"
|
||||
}
|
||||
},
|
||||
"crypt": {
|
||||
"version": "0.0.2",
|
||||
"resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz",
|
||||
"integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs="
|
||||
},
|
||||
"d64": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/d64/-/d64-1.0.0.tgz",
|
||||
@@ -2963,7 +2864,7 @@
|
||||
"form-data": {
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
|
||||
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
|
||||
"integrity": "sha1-3M5SwF9kTymManq5Nr1yTO/786Y=",
|
||||
"requires": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.6",
|
||||
@@ -2983,7 +2884,7 @@
|
||||
"fs-constants": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
|
||||
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
|
||||
"integrity": "sha1-a+Dem+mYzhavivwkSXue6bfM2a0="
|
||||
},
|
||||
"fs-extra": {
|
||||
"version": "8.1.0",
|
||||
@@ -3118,74 +3019,21 @@
|
||||
"dev": true
|
||||
},
|
||||
"google-auth-library": {
|
||||
"version": "6.0.6",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
|
||||
"integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
|
||||
"version": "5.10.1",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
|
||||
"integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
|
||||
"requires": {
|
||||
"arrify": "^2.0.0",
|
||||
"base64-js": "^1.3.0",
|
||||
"ecdsa-sig-formatter": "^1.0.11",
|
||||
"fast-text-encoding": "^1.0.0",
|
||||
"gaxios": "^3.0.0",
|
||||
"gcp-metadata": "^4.1.0",
|
||||
"gtoken": "^5.0.0",
|
||||
"gaxios": "^2.1.0",
|
||||
"gcp-metadata": "^3.4.0",
|
||||
"gtoken": "^4.1.0",
|
||||
"jws": "^4.0.0",
|
||||
"lru-cache": "^6.0.0"
|
||||
"lru-cache": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"bignumber.js": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
|
||||
"integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A=="
|
||||
},
|
||||
"gaxios": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
|
||||
"integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
|
||||
"requires": {
|
||||
"abort-controller": "^3.0.0",
|
||||
"extend": "^3.0.2",
|
||||
"https-proxy-agent": "^5.0.0",
|
||||
"is-stream": "^2.0.0",
|
||||
"node-fetch": "^2.3.0"
|
||||
}
|
||||
},
|
||||
"gcp-metadata": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
|
||||
"integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
|
||||
"requires": {
|
||||
"gaxios": "^3.0.0",
|
||||
"json-bigint": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"google-p12-pem": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
|
||||
"integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
|
||||
"requires": {
|
||||
"node-forge": "^0.9.0"
|
||||
}
|
||||
},
|
||||
"gtoken": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
|
||||
"integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
|
||||
"requires": {
|
||||
"gaxios": "^3.0.0",
|
||||
"google-p12-pem": "^3.0.0",
|
||||
"jws": "^4.0.0",
|
||||
"mime": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"json-bigint": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
|
||||
"integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
|
||||
"requires": {
|
||||
"bignumber.js": "^9.0.0"
|
||||
}
|
||||
},
|
||||
"jwa": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
|
||||
@@ -3204,24 +3052,6 @@
|
||||
"jwa": "^2.0.0",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"lru-cache": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
|
||||
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
|
||||
"requires": {
|
||||
"yallist": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"mime": {
|
||||
"version": "2.4.6",
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-2.4.6.tgz",
|
||||
"integrity": "sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA=="
|
||||
},
|
||||
"yallist": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
||||
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -3248,49 +3078,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": {
|
||||
"version": "13.13.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.15.tgz",
|
||||
"integrity": "sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw=="
|
||||
},
|
||||
"google-auth-library": {
|
||||
"version": "5.10.1",
|
||||
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
|
||||
"integrity": "sha512-rOlaok5vlpV9rSiUu5EpR0vVpc+PhN62oF4RyX/6++DG1VsaulAFEMlDYBLjJDDPI6OcNOCGAKy9UVB/3NIDXg==",
|
||||
"requires": {
|
||||
"arrify": "^2.0.0",
|
||||
"base64-js": "^1.3.0",
|
||||
"ecdsa-sig-formatter": "^1.0.11",
|
||||
"fast-text-encoding": "^1.0.0",
|
||||
"gaxios": "^2.1.0",
|
||||
"gcp-metadata": "^3.4.0",
|
||||
"gtoken": "^4.1.0",
|
||||
"jws": "^4.0.0",
|
||||
"lru-cache": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"jwa": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz",
|
||||
"integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==",
|
||||
"requires": {
|
||||
"buffer-equal-constant-time": "1.0.1",
|
||||
"ecdsa-sig-formatter": "1.0.11",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"jws": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz",
|
||||
"integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==",
|
||||
"requires": {
|
||||
"jwa": "^2.0.0",
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
"version": "13.13.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.12.tgz",
|
||||
"integrity": "sha512-zWz/8NEPxoXNT9YyF2osqyA9WjssZukYpgI4UYZpOjcyqwIUqWGkcCionaEb9Ki+FULyPyvNFpg/329Kd2/pbw=="
|
||||
},
|
||||
"protobufjs": {
|
||||
"version": "6.10.1",
|
||||
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz",
|
||||
"integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==",
|
||||
"version": "6.9.0",
|
||||
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz",
|
||||
"integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==",
|
||||
"requires": {
|
||||
"@protobufjs/aspromise": "^1.1.2",
|
||||
"@protobufjs/base64": "^1.1.2",
|
||||
@@ -3379,7 +3174,7 @@
|
||||
"har-validator": {
|
||||
"version": "5.1.3",
|
||||
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
|
||||
"integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
|
||||
"integrity": "sha1-HvievT5JllV2de7ZiTEQ3DUPoIA=",
|
||||
"requires": {
|
||||
"ajv": "^6.5.5",
|
||||
"har-schema": "^2.0.0"
|
||||
@@ -3475,9 +3270,9 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"agent-base": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz",
|
||||
"integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==",
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz",
|
||||
"integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==",
|
||||
"requires": {
|
||||
"debug": "4"
|
||||
}
|
||||
@@ -3517,9 +3312,9 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"agent-base": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz",
|
||||
"integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==",
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.0.tgz",
|
||||
"integrity": "sha512-j1Q7cSCqN+AwrmDd+pzgqc0/NpC655x2bUf5ZjRIO77DcNBFmh+OgRNzF6OKdCC9RSCb19fGd99+bhXFdkRNqw==",
|
||||
"requires": {
|
||||
"debug": "4"
|
||||
}
|
||||
@@ -3547,11 +3342,6 @@
|
||||
"safer-buffer": ">= 2.1.2 < 3"
|
||||
}
|
||||
},
|
||||
"ieee754": {
|
||||
"version": "1.1.13",
|
||||
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
|
||||
"integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg=="
|
||||
},
|
||||
"ignore": {
|
||||
"version": "4.0.6",
|
||||
"resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz",
|
||||
@@ -4050,9 +3840,9 @@
|
||||
}
|
||||
},
|
||||
"lodash": {
|
||||
"version": "4.17.20",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz",
|
||||
"integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA=="
|
||||
"version": "4.17.15",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
|
||||
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
|
||||
},
|
||||
"lodash.at": {
|
||||
"version": "4.6.0",
|
||||
@@ -4113,34 +3903,15 @@
|
||||
}
|
||||
},
|
||||
"logger-sharelatex": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.2.0.tgz",
|
||||
"integrity": "sha512-ko+OmE25XHJJCiz1R9EgwlfM7J/5olpunUfR3WcfuqOQrcUqsdBrDA2sOytngT0ViwjCR0Fh4qZVPwEWfmrvwA==",
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/logger-sharelatex/-/logger-sharelatex-2.1.0.tgz",
|
||||
"integrity": "sha512-WgAABqnBMOv0VAwQJyw+fY4rpqKX+nPJNRydGEYZTo+6lKtJI2TttmZ+Coryg1LEzAjNagxoU78XOHKQvhg7qg==",
|
||||
"requires": {
|
||||
"@google-cloud/logging-bunyan": "^3.0.0",
|
||||
"@google-cloud/logging-bunyan": "^2.0.0",
|
||||
"@overleaf/o-error": "^3.0.0",
|
||||
"bunyan": "^1.8.14",
|
||||
"node-fetch": "^2.6.0",
|
||||
"raven": "^2.6.4",
|
||||
"yn": "^4.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"bunyan": {
|
||||
"version": "1.8.14",
|
||||
"resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.14.tgz",
|
||||
"integrity": "sha512-LlahJUxXzZLuw/hetUQJmRgZ1LF6+cr5TPpRj6jf327AsiIq2jhYEH4oqUUkVKTor+9w2BT3oxVwhzE5lw9tcg==",
|
||||
"requires": {
|
||||
"dtrace-provider": "~0.8",
|
||||
"moment": "^2.19.3",
|
||||
"mv": "~2",
|
||||
"safe-json-stringify": "~1"
|
||||
}
|
||||
},
|
||||
"yn": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-4.0.0.tgz",
|
||||
"integrity": "sha512-huWiiCS4TxKc4SfgmTwW1K7JmXPPAmuXWYy4j9qjQo4+27Kni8mGhAAi1cloRWmBe2EqcLgt3IGqQoRL/MtPgg=="
|
||||
}
|
||||
"bunyan": "1.8.12",
|
||||
"raven": "1.1.3",
|
||||
"yn": "^3.1.1"
|
||||
}
|
||||
},
|
||||
"loglevel": {
|
||||
@@ -4208,6 +3979,11 @@
|
||||
"yallist": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"lsmod": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/lsmod/-/lsmod-1.0.0.tgz",
|
||||
"integrity": "sha1-mgD3bco26yP6BTUK/htYXUKZ5ks="
|
||||
},
|
||||
"lynx": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/lynx/-/lynx-0.2.0.tgz",
|
||||
@@ -4240,23 +4016,6 @@
|
||||
"resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.1.0.tgz",
|
||||
"integrity": "sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g=="
|
||||
},
|
||||
"md5": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
|
||||
"integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==",
|
||||
"requires": {
|
||||
"charenc": "0.0.2",
|
||||
"crypt": "0.0.2",
|
||||
"is-buffer": "~1.1.6"
|
||||
},
|
||||
"dependencies": {
|
||||
"is-buffer": {
|
||||
"version": "1.1.6",
|
||||
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
|
||||
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"media-typer": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||
@@ -4683,9 +4442,9 @@
|
||||
}
|
||||
},
|
||||
"node-fetch": {
|
||||
"version": "2.6.1",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
|
||||
"integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw=="
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz",
|
||||
"integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA=="
|
||||
},
|
||||
"node-forge": {
|
||||
"version": "0.9.1",
|
||||
@@ -4769,7 +4528,7 @@
|
||||
"oauth-sign": {
|
||||
"version": "0.9.0",
|
||||
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
|
||||
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
|
||||
"integrity": "sha1-R6ewFrqmi1+g7PPe4IqFxnmsZFU="
|
||||
},
|
||||
"object-assign": {
|
||||
"version": "4.1.1",
|
||||
@@ -5064,9 +4823,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"prettier": {
|
||||
"version": "2.0.5",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.5.tgz",
|
||||
"integrity": "sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==",
|
||||
"version": "1.19.1",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
|
||||
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
|
||||
"dev": true
|
||||
},
|
||||
"prettier-eslint": {
|
||||
@@ -5298,12 +5057,6 @@
|
||||
"mimic-fn": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"prettier": {
|
||||
"version": "1.19.1",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
|
||||
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
|
||||
"dev": true
|
||||
},
|
||||
"restore-cursor": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
|
||||
@@ -5836,26 +5589,21 @@
|
||||
"integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="
|
||||
},
|
||||
"raven": {
|
||||
"version": "2.6.4",
|
||||
"resolved": "https://registry.npmjs.org/raven/-/raven-2.6.4.tgz",
|
||||
"integrity": "sha512-6PQdfC4+DQSFncowthLf+B6Hr0JpPsFBgTVYTAOq7tCmx/kR4SXbeawtPch20+3QfUcQDoJBLjWW1ybvZ4kXTw==",
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/raven/-/raven-1.1.3.tgz",
|
||||
"integrity": "sha1-QnPBrm005CMPUbLAEEGjK5Iygio=",
|
||||
"requires": {
|
||||
"cookie": "0.3.1",
|
||||
"md5": "^2.2.1",
|
||||
"stack-trace": "0.0.10",
|
||||
"timed-out": "4.0.1",
|
||||
"uuid": "3.3.2"
|
||||
"json-stringify-safe": "5.0.1",
|
||||
"lsmod": "1.0.0",
|
||||
"stack-trace": "0.0.9",
|
||||
"uuid": "3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"stack-trace": {
|
||||
"version": "0.0.10",
|
||||
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
|
||||
"integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA="
|
||||
},
|
||||
"uuid": {
|
||||
"version": "3.3.2",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
||||
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz",
|
||||
"integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg="
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -6501,8 +6249,7 @@
|
||||
"stack-trace": {
|
||||
"version": "0.0.9",
|
||||
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz",
|
||||
"integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=",
|
||||
"dev": true
|
||||
"integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU="
|
||||
},
|
||||
"statsd-parser": {
|
||||
"version": "0.0.4",
|
||||
@@ -6808,11 +6555,6 @@
|
||||
"readable-stream": "2 || 3"
|
||||
}
|
||||
},
|
||||
"timed-out": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz",
|
||||
"integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8="
|
||||
},
|
||||
"timekeeper": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz",
|
||||
@@ -6934,11 +6676,6 @@
|
||||
"integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==",
|
||||
"dev": true
|
||||
},
|
||||
"underscore": {
|
||||
"version": "1.11.0",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.11.0.tgz",
|
||||
"integrity": "sha512-xY96SsN3NA461qIRKZ/+qox37YXPtSBswMGfiNptr+wrt6ds4HaMw23TP612fEyGekRE6LNRiLYr/aqbHXNedw=="
|
||||
},
|
||||
"universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
|
||||
"test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
|
||||
"nodemon": "nodemon --config nodemon.json",
|
||||
"lint": "node_modules/.bin/eslint --max-warnings 0 .",
|
||||
"lint": "node_modules/.bin/eslint .",
|
||||
"format": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --list-different",
|
||||
"format:fix": "node_modules/.bin/prettier-eslint $PWD'/**/*.js' --write"
|
||||
},
|
||||
@@ -27,8 +27,8 @@
|
||||
"fs-extra": "^8.1.0",
|
||||
"heapdump": "^0.3.15",
|
||||
"lockfile": "^1.0.4",
|
||||
"lodash": "^4.17.20",
|
||||
"logger-sharelatex": "^2.2.0",
|
||||
"lodash": "^4.17.15",
|
||||
"logger-sharelatex": "^2.1.0",
|
||||
"lynx": "0.2.0",
|
||||
"metrics-sharelatex": "^2.6.0",
|
||||
"mysql": "^2.18.1",
|
||||
@@ -36,7 +36,6 @@
|
||||
"sequelize": "^5.21.5",
|
||||
"settings-sharelatex": "git+https://github.com/sharelatex/settings-sharelatex.git#v1.1.0",
|
||||
"sqlite3": "^4.1.1",
|
||||
"underscore": "^1.11.0",
|
||||
"v8-profiler-node8": "^6.1.1",
|
||||
"wrench": "~1.5.9"
|
||||
},
|
||||
@@ -60,7 +59,7 @@
|
||||
"eslint-plugin-react": "^7.19.0",
|
||||
"eslint-plugin-standard": "^4.0.1",
|
||||
"mocha": "^7.1.0",
|
||||
"prettier": "^2.0.0",
|
||||
"prettier": "^1.19.1",
|
||||
"prettier-eslint-cli": "^5.0.0",
|
||||
"sandboxed-module": "^2.0.3",
|
||||
"sinon": "~9.0.1",
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
const Client = require('./helpers/Client')
|
||||
const ClsiApp = require('./helpers/ClsiApp')
|
||||
const { expect } = require('chai')
|
||||
|
||||
describe('AllowedImageNames', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = Client.randomId()
|
||||
this.request = {
|
||||
options: {
|
||||
imageName: undefined
|
||||
},
|
||||
resources: [
|
||||
{
|
||||
path: 'main.tex',
|
||||
content: `\
|
||||
\\documentclass{article}
|
||||
\\begin{document}
|
||||
Hello world
|
||||
\\end{document}\
|
||||
`
|
||||
}
|
||||
]
|
||||
}
|
||||
ClsiApp.ensureRunning(done)
|
||||
})
|
||||
|
||||
describe('with a valid name', function () {
|
||||
beforeEach(function (done) {
|
||||
this.request.options.imageName = process.env.TEXLIVE_IMAGE
|
||||
|
||||
Client.compile(this.project_id, this.request, (error, res, body) => {
|
||||
this.error = error
|
||||
this.res = res
|
||||
this.body = body
|
||||
done(error)
|
||||
})
|
||||
})
|
||||
it('should return success', function () {
|
||||
expect(this.res.statusCode).to.equal(200)
|
||||
})
|
||||
|
||||
it('should return a PDF', function () {
|
||||
let pdf
|
||||
try {
|
||||
pdf = Client.getOutputFile(this.body, 'pdf')
|
||||
} catch (e) {}
|
||||
expect(pdf).to.exist
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an invalid name', function () {
|
||||
beforeEach(function (done) {
|
||||
this.request.options.imageName = 'something/evil:1337'
|
||||
Client.compile(this.project_id, this.request, (error, res, body) => {
|
||||
this.error = error
|
||||
this.res = res
|
||||
this.body = body
|
||||
done(error)
|
||||
})
|
||||
})
|
||||
it('should return non success', function () {
|
||||
expect(this.res.statusCode).to.not.equal(200)
|
||||
})
|
||||
|
||||
it('should not return a PDF', function () {
|
||||
let pdf
|
||||
try {
|
||||
pdf = Client.getOutputFile(this.body, 'pdf')
|
||||
} catch (e) {}
|
||||
expect(pdf).to.not.exist
|
||||
})
|
||||
})
|
||||
|
||||
describe('wordcount', function () {
|
||||
beforeEach(function (done) {
|
||||
Client.compile(this.project_id, this.request, done)
|
||||
})
|
||||
it('should error out with an invalid imageName', function () {
|
||||
Client.wordcountWithImage(
|
||||
this.project_id,
|
||||
'main.tex',
|
||||
'something/evil:1337',
|
||||
(error, result) => {
|
||||
expect(String(error)).to.include('statusCode=400')
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should produce a texcout a valid imageName', function () {
|
||||
Client.wordcountWithImage(
|
||||
this.project_id,
|
||||
'main.tex',
|
||||
process.env.TEXLIVE_IMAGE,
|
||||
(error, result) => {
|
||||
expect(error).to.not.exist
|
||||
expect(result).to.exist
|
||||
expect(result.texcount).to.exist
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -24,7 +24,7 @@ const ChildProcess = require('child_process')
|
||||
const ClsiApp = require('./helpers/ClsiApp')
|
||||
const logger = require('logger-sharelatex')
|
||||
const Path = require('path')
|
||||
const fixturePath = (path) => {
|
||||
const fixturePath = path => {
|
||||
if (path.slice(0, 3) === 'tmp') {
|
||||
return '/tmp/clsi_acceptance_tests' + path.slice(3)
|
||||
}
|
||||
@@ -50,8 +50,8 @@ const convertToPng = function (pdfPath, pngPath, callback) {
|
||||
console.log(command)
|
||||
const convert = ChildProcess.exec(command)
|
||||
const stdout = ''
|
||||
convert.stdout.on('data', (chunk) => console.log('STDOUT', chunk.toString()))
|
||||
convert.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
|
||||
convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString()))
|
||||
convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
|
||||
return convert.on('exit', () => callback())
|
||||
}
|
||||
|
||||
@@ -66,11 +66,11 @@ const compare = function (originalPath, generatedPath, callback) {
|
||||
)} ${diff_file}`
|
||||
)
|
||||
let stderr = ''
|
||||
proc.stderr.on('data', (chunk) => (stderr += chunk))
|
||||
proc.stderr.on('data', chunk => (stderr += chunk))
|
||||
return proc.on('exit', () => {
|
||||
if (stderr.trim() === '0 (0)') {
|
||||
// remove output diff if test matches expected image
|
||||
fs.unlink(diff_file, (err) => {
|
||||
fs.unlink(diff_file, err => {
|
||||
if (err) {
|
||||
throw err
|
||||
}
|
||||
@@ -89,8 +89,8 @@ const checkPdfInfo = function (pdfPath, callback) {
|
||||
}
|
||||
const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`)
|
||||
let stdout = ''
|
||||
proc.stdout.on('data', (chunk) => (stdout += chunk))
|
||||
proc.stderr.on('data', (chunk) => console.log('STDERR', chunk.toString()))
|
||||
proc.stdout.on('data', chunk => (stdout += chunk))
|
||||
proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString()))
|
||||
return proc.on('exit', () => {
|
||||
if (stdout.match(/Optimized:\s+yes/)) {
|
||||
return callback(null, true)
|
||||
@@ -136,14 +136,14 @@ const comparePdf = function (project_id, example_dir, callback) {
|
||||
return convertToPng(
|
||||
`tmp/${project_id}.pdf`,
|
||||
`tmp/${project_id}-generated.png`,
|
||||
(error) => {
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return convertToPng(
|
||||
`examples/${example_dir}/output.pdf`,
|
||||
`tmp/${project_id}-source.png`,
|
||||
(error) => {
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
@@ -163,7 +163,7 @@ const comparePdf = function (project_id, example_dir, callback) {
|
||||
}
|
||||
)
|
||||
} else {
|
||||
return compareMultiplePages(project_id, (error) => {
|
||||
return compareMultiplePages(project_id, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
@@ -178,12 +178,7 @@ const comparePdf = function (project_id, example_dir, callback) {
|
||||
)
|
||||
}
|
||||
|
||||
const downloadAndComparePdf = function (
|
||||
project_id,
|
||||
example_dir,
|
||||
url,
|
||||
callback
|
||||
) {
|
||||
const downloadAndComparePdf = function(project_id, example_dir, url, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error) {}
|
||||
}
|
||||
@@ -217,9 +212,8 @@ describe('Example Documents', function () {
|
||||
fsExtra.remove(fixturePath('tmp'), done)
|
||||
})
|
||||
|
||||
return Array.from(fs.readdirSync(fixturePath('examples'))).map(
|
||||
(example_dir) =>
|
||||
((example_dir) =>
|
||||
return Array.from(fs.readdirSync(fixturePath('examples'))).map(example_dir =>
|
||||
(example_dir =>
|
||||
describe(example_dir, function() {
|
||||
before(function() {
|
||||
return (this.project_id = Client.randomId() + '_' + example_dir)
|
||||
@@ -237,16 +231,10 @@ describe('Example Documents', function () {
|
||||
error ||
|
||||
__guard__(
|
||||
body != null ? body.compile : undefined,
|
||||
(x) => x.status
|
||||
x => x.status
|
||||
) === 'failure'
|
||||
) {
|
||||
console.log(
|
||||
'DEBUG: error',
|
||||
error,
|
||||
'body',
|
||||
JSON.stringify(body)
|
||||
)
|
||||
return done(new Error('Compile failed'))
|
||||
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||
}
|
||||
const pdf = Client.getOutputFile(body, 'pdf')
|
||||
return downloadAndComparePdf(
|
||||
@@ -271,16 +259,10 @@ describe('Example Documents', function () {
|
||||
error ||
|
||||
__guard__(
|
||||
body != null ? body.compile : undefined,
|
||||
(x) => x.status
|
||||
x => x.status
|
||||
) === 'failure'
|
||||
) {
|
||||
console.log(
|
||||
'DEBUG: error',
|
||||
error,
|
||||
'body',
|
||||
JSON.stringify(body)
|
||||
)
|
||||
return done(new Error('Compile failed'))
|
||||
console.log('DEBUG: error', error, 'body', JSON.stringify(body))
|
||||
}
|
||||
const pdf = Client.getOutputFile(body, 'pdf')
|
||||
return downloadAndComparePdf(
|
||||
|
||||
@@ -56,7 +56,7 @@ describe('Timed out compile', function () {
|
||||
})
|
||||
|
||||
return it('should return the log output file name', function() {
|
||||
const outputFilePaths = this.body.compile.outputFiles.map((x) => x.path)
|
||||
const outputFilePaths = this.body.compile.outputFiles.map(x => x.path)
|
||||
return outputFilePaths.should.include('output.log')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -35,7 +35,9 @@ const Server = {
|
||||
getFile() {},
|
||||
|
||||
randomId() {
|
||||
return Math.random().toString(16).slice(2)
|
||||
return Math.random()
|
||||
.toString(16)
|
||||
.slice(2)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -336,7 +338,7 @@ describe('Url Caching', function () {
|
||||
]
|
||||
}
|
||||
|
||||
return Client.compile(this.project_id, this.request, (error) => {
|
||||
return Client.compile(this.project_id, this.request, error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -23,7 +23,9 @@ module.exports = Client = {
|
||||
host: Settings.apis.clsi.url,
|
||||
|
||||
randomId() {
|
||||
return Math.random().toString(16).slice(2)
|
||||
return Math.random()
|
||||
.toString(16)
|
||||
.slice(2)
|
||||
},
|
||||
|
||||
compile(project_id, data, callback) {
|
||||
@@ -62,7 +64,7 @@ module.exports = Client = {
|
||||
const app = express()
|
||||
app.use(express.static(directory))
|
||||
console.log('starting test server on', port, host)
|
||||
return app.listen(port, host).on('error', (error) => {
|
||||
return app.listen(port, host).on('error', error => {
|
||||
console.error('error starting server:', error.message)
|
||||
return process.exit(1)
|
||||
})
|
||||
@@ -128,7 +130,7 @@ module.exports = Client = {
|
||||
entities = entities.concat(
|
||||
fs
|
||||
.readdirSync(`${baseDirectory}/${directory}/${entity}`)
|
||||
.map((subEntity) => {
|
||||
.map(subEntity => {
|
||||
if (subEntity === 'main.tex') {
|
||||
rootResourcePath = `${entity}/${subEntity}`
|
||||
}
|
||||
@@ -187,11 +189,6 @@ module.exports = Client = {
|
||||
},
|
||||
|
||||
wordcount(project_id, file, callback) {
|
||||
const image = undefined
|
||||
Client.wordcountWithImage(project_id, file, image, callback)
|
||||
},
|
||||
|
||||
wordcountWithImage(project_id, file, image, callback) {
|
||||
if (callback == null) {
|
||||
callback = function(error, pdfPositions) {}
|
||||
}
|
||||
@@ -199,7 +196,6 @@ module.exports = Client = {
|
||||
{
|
||||
url: `${this.host}/project/${project_id}/wordcount`,
|
||||
qs: {
|
||||
image,
|
||||
file
|
||||
}
|
||||
},
|
||||
@@ -207,9 +203,6 @@ module.exports = Client = {
|
||||
if (error != null) {
|
||||
return callback(error)
|
||||
}
|
||||
if (response.statusCode !== 200) {
|
||||
return callback(new Error(`statusCode=${response.statusCode}`))
|
||||
}
|
||||
return callback(null, JSON.parse(body))
|
||||
}
|
||||
)
|
||||
|
||||
@@ -35,10 +35,10 @@ module.exports = {
|
||||
return app.listen(
|
||||
__guard__(
|
||||
Settings.internal != null ? Settings.internal.clsi : undefined,
|
||||
(x) => x.port
|
||||
x => x.port
|
||||
),
|
||||
'localhost',
|
||||
(error) => {
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ const _ = require('lodash')
|
||||
const concurentCompiles = 5
|
||||
const totalCompiles = 50
|
||||
|
||||
const buildUrl = (path) =>
|
||||
const buildUrl = path =>
|
||||
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
||||
|
||||
const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
|
||||
@@ -74,12 +74,12 @@ ${bodyContent}
|
||||
)
|
||||
}
|
||||
|
||||
const jobs = _.map(__range__(1, totalCompiles, true), (i) => (cb) =>
|
||||
const jobs = _.map(__range__(1, totalCompiles, true), i => cb =>
|
||||
makeRequest(i, cb)
|
||||
)
|
||||
|
||||
const startTime = new Date()
|
||||
async.parallelLimit(jobs, concurentCompiles, (err) => {
|
||||
async.parallelLimit(jobs, concurentCompiles, err => {
|
||||
if (err != null) {
|
||||
console.error(err)
|
||||
}
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
const request = require('request')
|
||||
const Settings = require('settings-sharelatex')
|
||||
|
||||
const buildUrl = (path) =>
|
||||
const buildUrl = path =>
|
||||
`http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
|
||||
|
||||
const url = buildUrl(`project/smoketest-${process.pid}/compile`)
|
||||
|
||||
module.exports = {
|
||||
sendNewResult(res) {
|
||||
this._run((error) => this._sendResponse(res, error))
|
||||
this._run(error => this._sendResponse(res, error))
|
||||
},
|
||||
sendLastResult(res) {
|
||||
this._sendResponse(res, this._lastError)
|
||||
},
|
||||
triggerRun(cb) {
|
||||
this._run((error) => {
|
||||
this._run(error => {
|
||||
this._lastError = error
|
||||
cb(error)
|
||||
})
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const sinon = require('sinon')
|
||||
require('chai').should()
|
||||
const { expect } = require('chai')
|
||||
const modulePath = require('path').join(
|
||||
__dirname,
|
||||
'../../../app/js/CompileController'
|
||||
@@ -115,7 +114,7 @@ describe('CompileController', function () {
|
||||
compile: {
|
||||
status: 'success',
|
||||
error: null,
|
||||
outputFiles: this.output_files.map((file) => {
|
||||
outputFiles: this.output_files.map(file => {
|
||||
return {
|
||||
url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
|
||||
path: file.path,
|
||||
@@ -288,60 +287,21 @@ describe('CompileController', function () {
|
||||
this.CompileManager.wordcount = sinon
|
||||
.stub()
|
||||
.callsArgWith(4, null, (this.texcount = ['mock-texcount']))
|
||||
return this.CompileController.wordcount(this.req, this.res, this.next)
|
||||
})
|
||||
|
||||
it('should return the word count of a file', function() {
|
||||
this.CompileController.wordcount(this.req, this.res, this.next)
|
||||
return this.CompileManager.wordcount
|
||||
.calledWith(this.project_id, undefined, this.file, this.image)
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the texcount info', function () {
|
||||
this.CompileController.wordcount(this.req, this.res, this.next)
|
||||
return it('should return the texcount info', function() {
|
||||
return this.res.json
|
||||
.calledWith({
|
||||
texcount: this.texcount
|
||||
})
|
||||
.should.equal(true)
|
||||
})
|
||||
|
||||
describe('when allowedImages is set', function () {
|
||||
beforeEach(function () {
|
||||
this.Settings.clsi = { docker: {} }
|
||||
this.Settings.clsi.docker.allowedImages = [
|
||||
'repo/image:tag1',
|
||||
'repo/image:tag2'
|
||||
]
|
||||
this.res.send = sinon.stub()
|
||||
this.res.status = sinon.stub().returns({ send: this.res.send })
|
||||
})
|
||||
|
||||
describe('with an invalid image', function () {
|
||||
beforeEach(function () {
|
||||
this.req.query.image = 'something/evil:1337'
|
||||
this.CompileController.wordcount(this.req, this.res, this.next)
|
||||
})
|
||||
it('should return a 400', function () {
|
||||
expect(this.res.status.calledWith(400)).to.equal(true)
|
||||
})
|
||||
it('should not run the query', function () {
|
||||
expect(this.CompileManager.wordcount.called).to.equal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a valid image', function () {
|
||||
beforeEach(function () {
|
||||
this.req.query.image = 'repo/image:tag1'
|
||||
this.CompileController.wordcount(this.req, this.res, this.next)
|
||||
})
|
||||
it('should not return a 400', function () {
|
||||
expect(this.res.status.calledWith(400)).to.equal(false)
|
||||
})
|
||||
it('should run the query', function () {
|
||||
expect(this.CompileManager.wordcount.called).to.equal(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -374,7 +374,7 @@ describe('CompileManager', function () {
|
||||
this.column = 3
|
||||
this.file_name = 'main.tex'
|
||||
this.child_process.execFile = sinon.stub()
|
||||
return (this.Settings.path.synctexBaseDir = (project_id) =>
|
||||
return (this.Settings.path.synctexBaseDir = project_id =>
|
||||
`${this.Settings.path.compilesDir}/${this.project_id}-${this.user_id}`)
|
||||
})
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ describe('LockManager', function () {
|
||||
this.callback = sinon.stub()
|
||||
return this.LockManager.runWithLock(
|
||||
'lock-one',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world'), 100),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -59,7 +59,7 @@ describe('LockManager', function () {
|
||||
this.callback2 = sinon.stub()
|
||||
this.LockManager.runWithLock(
|
||||
'lock-one',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -68,7 +68,7 @@ describe('LockManager', function () {
|
||||
)
|
||||
return this.LockManager.runWithLock(
|
||||
'lock-two',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -100,7 +100,7 @@ describe('LockManager', function () {
|
||||
this.callback2 = sinon.stub()
|
||||
this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -109,7 +109,7 @@ describe('LockManager', function () {
|
||||
)
|
||||
return this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -154,7 +154,7 @@ describe('LockManager', function () {
|
||||
}
|
||||
this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(
|
||||
() => releaseLock(null, 'hello', 'world', 'one'),
|
||||
1100
|
||||
@@ -167,7 +167,7 @@ describe('LockManager', function () {
|
||||
)
|
||||
return this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
|
||||
|
||||
(err, ...args) => {
|
||||
@@ -211,7 +211,7 @@ describe('LockManager', function () {
|
||||
}
|
||||
this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(
|
||||
() => releaseLock(null, 'hello', 'world', 'one'),
|
||||
1500
|
||||
@@ -224,7 +224,7 @@ describe('LockManager', function () {
|
||||
)
|
||||
return this.LockManager.runWithLock(
|
||||
'lock',
|
||||
(releaseLock) =>
|
||||
releaseLock =>
|
||||
setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100),
|
||||
|
||||
(err, ...args) => {
|
||||
|
||||
@@ -273,7 +273,7 @@ describe('DockerRunner', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('with image override', function () {
|
||||
return describe('with image override', function() {
|
||||
beforeEach(function() {
|
||||
this.Settings.texliveImageNameOveride = 'overrideimage.com/something'
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
@@ -296,62 +296,6 @@ describe('DockerRunner', function () {
|
||||
return image.should.equal('overrideimage.com/something/image:2016.2')
|
||||
})
|
||||
})
|
||||
|
||||
describe('with image restriction', function () {
|
||||
beforeEach(function () {
|
||||
this.Settings.clsi.docker.allowedImages = [
|
||||
'repo/image:tag1',
|
||||
'repo/image:tag2'
|
||||
]
|
||||
this.DockerRunner._runAndWaitForContainer = sinon
|
||||
.stub()
|
||||
.callsArgWith(3, null, (this.output = 'mock-output'))
|
||||
})
|
||||
|
||||
describe('with a valid image', function () {
|
||||
beforeEach(function () {
|
||||
this.DockerRunner.run(
|
||||
this.project_id,
|
||||
this.command,
|
||||
this.directory,
|
||||
'repo/image:tag1',
|
||||
this.timeout,
|
||||
this.env,
|
||||
this.compileGroup,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should setup the container', function () {
|
||||
this.DockerRunner._getContainerOptions.called.should.equal(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('with a invalid image', function () {
|
||||
beforeEach(function () {
|
||||
this.DockerRunner.run(
|
||||
this.project_id,
|
||||
this.command,
|
||||
this.directory,
|
||||
'something/different:evil',
|
||||
this.timeout,
|
||||
this.env,
|
||||
this.compileGroup,
|
||||
this.callback
|
||||
)
|
||||
})
|
||||
|
||||
it('should call the callback with an error', function () {
|
||||
const err = new Error('image not allowed')
|
||||
this.callback.called.should.equal(true)
|
||||
this.callback.args[0][0].message.should.equal(err.message)
|
||||
})
|
||||
|
||||
it('should not setup the container', function () {
|
||||
this.DockerRunner._getContainerOptions.called.should.equal(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('run with _getOptions', function() {
|
||||
@@ -738,7 +682,7 @@ describe('DockerRunner', function () {
|
||||
this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds
|
||||
this.listContainers.callsArgWith(1, null, this.containers)
|
||||
this.DockerRunner.destroyContainer = sinon.stub().callsArg(3)
|
||||
return this.DockerRunner.destroyOldContainers((error) => {
|
||||
return this.DockerRunner.destroyOldContainers(error => {
|
||||
this.callback(error)
|
||||
return done()
|
||||
})
|
||||
@@ -785,7 +729,7 @@ describe('DockerRunner', function () {
|
||||
return this.DockerRunner._destroyContainer(
|
||||
this.containerId,
|
||||
false,
|
(err) => {
err => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -799,10 +743,10 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
true,
(err) => {
err => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
.calledWith({ force: true, v: true })
.calledWith({ force: true })
.should.equal(true)
return done()
}
@@ -813,10 +757,10 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
this.fakeContainer.remove.callCount.should.equal(1)
this.fakeContainer.remove
.calledWith({ force: false, v: true })
.calledWith({ force: false })
.should.equal(true)
return done()
}
@@ -827,7 +771,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.equal(null)
return done()
}
@@ -850,7 +794,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.equal(null)
return done()
}
@@ -874,7 +818,7 @@ describe('DockerRunner', function () {
return this.DockerRunner._destroyContainer(
this.containerId,
false,
(err) => {
err => {
expect(err).to.not.equal(null)
expect(err).to.equal(this.fakeError)
return done()
@@ -894,7 +838,7 @@ describe('DockerRunner', function () {
})

it('should get the container', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
this.Docker.prototype.getContainer.callCount.should.equal(1)
this.Docker.prototype.getContainer
.calledWith(this.containerId)
@@ -904,14 +848,14 @@ describe('DockerRunner', function () {
})

it('should try to force-destroy the container', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
this.fakeContainer.kill.callCount.should.equal(1)
return done()
})
})

it('should not produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.equal(undefined)
return done()
})
@@ -932,7 +876,7 @@ describe('DockerRunner', function () {
})

return it('should not produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.equal(undefined)
return done()
})
@@ -953,7 +897,7 @@ describe('DockerRunner', function () {
})

return it('should produce an error', function(done) {
return this.DockerRunner.kill(this.containerId, (err) => {
return this.DockerRunner.kill(this.containerId, err => {
expect(err).to.not.equal(undefined)
expect(err).to.equal(this.fakeError)
return done()

@@ -144,7 +144,7 @@ describe('LatexRunner', function () {
return it('should include the flags in the command', function() {
const command = this.CommandRunner.run.args[0][1]
const flags = command.filter(
(arg) => arg === '-file-line-error' || arg === '-halt-on-error'
arg => arg === '-file-line-error' || arg === '-halt-on-error'
)
flags.length.should.equal(2)
flags[0].should.equal('-file-line-error')

@@ -126,7 +126,10 @@ describe('OutputFileOptimiser', function () {
.stub()
.withArgs(this.fd)
.yields(null, 100, Buffer.from('hello /Linearized 1'))
this.fs.close = sinon.stub().withArgs(this.fd).yields(null)
this.fs.close = sinon
.stub()
.withArgs(this.fd)
.yields(null)
return this.OutputFileOptimiser.checkIfPDFIsOptimised(
this.src,
this.callback

@@ -109,7 +109,7 @@ describe('ProjectPersistenceManager', function () {
})

it('should clear each expired project', function() {
return Array.from(this.project_ids).map((project_id) =>
return Array.from(this.project_ids).map(project_id =>
this.ProjectPersistenceManager.clearProjectFromCache
.calledWith(project_id)
.should.equal(true)

@@ -114,48 +114,6 @@ describe('RequestParser', function () {
})
})

describe('when image restrictions are present', function () {
beforeEach(function () {
this.settings.clsi = { docker: {} }
this.settings.clsi.docker.allowedImages = [
'repo/name:tag1',
'repo/name:tag2'
]
})

describe('with imageName set to something invalid', function () {
beforeEach(function () {
const request = this.validRequest
request.compile.options.imageName = 'something/different:latest'
this.RequestParser.parse(request, (error, data) => {
this.error = error
this.data = data
})
})

it('should throw an error for imageName', function () {
expect(String(this.error)).to.include(
'imageName attribute should be one of'
)
})
})

describe('with imageName set to something valid', function () {
beforeEach(function () {
const request = this.validRequest
request.compile.options.imageName = 'repo/name:tag1'
this.RequestParser.parse(request, (error, data) => {
this.error = error
this.data = data
})
})

it('should set the imageName', function () {
this.data.imageName.should.equal('repo/name:tag1')
})
})
})

describe('with flags set', function() {
beforeEach(function() {
this.validRequest.compile.options.flags = ['-file-line-error']

@@ -78,7 +78,7 @@ describe('ResourceWriter', function () {
})

it('should write each resource to disk', function() {
return Array.from(this.resources).map((resource) =>
return Array.from(this.resources).map(resource =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)
@@ -139,7 +139,7 @@ describe('ResourceWriter', function () {
})

it('should write each resource to disk', function() {
return Array.from(this.resources).map((resource) =>
return Array.from(this.resources).map(resource =>
this.ResourceWriter._writeResourceToDisk
.calledWith(this.project_id, resource, this.basePath)
.should.equal(true)

@@ -342,7 +342,7 @@ describe('UrlCache', function () {
})

it('should clear the cache for each url in the project', function() {
return Array.from(this.urls).map((url) =>
return Array.from(this.urls).map(url =>
this.UrlCache._clearUrlFromCache
.calledWith(this.project_id, url)
.should.equal(true)

@@ -40,7 +40,7 @@ describe('UrlFetcher', function () {

it('should call pipeUrlToFile', function(done) {
this.UrlFetcher.pipeUrlToFile.callsArgWith(2)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.called.should.equal(true)
done()
@@ -50,7 +50,7 @@ describe('UrlFetcher', function () {
it('should call pipeUrlToFile multiple times on error', function(done) {
const error = new Error("couldn't download file")
this.UrlFetcher.pipeUrlToFile.callsArgWith(2, error)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
expect(err).to.equal(error)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(3)
done()
@@ -60,7 +60,7 @@ describe('UrlFetcher', function () {
it('should call pipeUrlToFile twice if only 1 error', function(done) {
this.UrlFetcher.pipeUrlToFile.onCall(0).callsArgWith(2, 'error')
this.UrlFetcher.pipeUrlToFile.onCall(1).callsArgWith(2)
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFileWithRetry(this.url, this.path, err => {
expect(err).to.equal(undefined)
this.UrlFetcher.pipeUrlToFile.callCount.should.equal(2)
done()
@@ -167,7 +167,7 @@ describe('UrlFetcher', function () {

describe('with non success status code', function() {
beforeEach(function(done) {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
this.callback(err)
return done()
})
@@ -188,7 +188,7 @@ describe('UrlFetcher', function () {

return describe('with error', function() {
beforeEach(function(done) {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, (err) => {
this.UrlFetcher.pipeUrlToFile(this.url, this.path, err => {
this.callback(err)
return done()
})