Commit 4e444124 authored by Jure's avatar Jure

Merge branch 'master' into data-loaders-registration

parents 7c266683 56f8887d
Pipeline #13098 passed with stages
in 15 minutes and 6 seconds
......@@ -9,7 +9,6 @@ stages:
- test
- review
- audit
- staging
- production
build:
......@@ -24,6 +23,9 @@ build:
- docker login -u $DOCKERHUB_USERNAME -p $DOCKERHUB_PASSWORD
- echo "Ignore warning! Cannot perform an interactive login from a non TTY device"
- docker push $IMAGE_ORG/$IMAGE_NAME:$CI_COMMIT_SHA
# - echo "Building XSweet job runner image"
# - docker build -t pubsweet/job-xsweet:$CI_COMMIT_SHA components/server/job-xsweet
# - docker push pubsweet/job-xsweet:$CI_COMMIT_SHA
pages:
image: $IMAGE_ORG/$IMAGE_NAME:$CI_COMMIT_SHA
......@@ -130,17 +132,27 @@ test:
# specify host here else it confuses the linked postgres image
- PGHOST=postgres yarn test
# if tests pass we will push latest, labelled with current commit hash
push:latest:
image: docker:latest
stage: staging
script:
- if [ -z "$DOCKERHUB_USERNAME" ] || [ -z "$DOCKERHUB_PASSWORD" ]; then echo "Not pushing" && exit 0; fi
- docker login -u $DOCKERHUB_USERNAME -p $DOCKERHUB_PASSWORD
- echo "Ignore warning! Cannot perform an interactive login from a non TTY device"
- docker build -t $IMAGE_ORG/$IMAGE_NAME:latest --label COMMIT_SHA=$CI_COMMIT_SHA .
- docker push $IMAGE_ORG/$IMAGE_NAME:latest
only:
- master
except:
- tags
\ No newline at end of file
# Reenable this once GitLab docker networking is sorted:
# https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1041
# test:job-xsweet:
# image: $IMAGE_ORG/$IMAGE_NAME:$CI_COMMIT_SHA
# stage: test
# variables:
# # don't clone repo as image already has it
# GIT_STRATEGY: none
# # setup data for postgres image
# POSTGRES_USER: test
# POSTGRES_PASSWORD: pw
# # connection details for tests
# PGUSER: test
# PGPASSWORD: pw
# NODE_ENV: test
# DATABASE_URL: postgres://test:pw@postgres/test
# services:
# - postgres
# - pubsweet/job-xsweet
# except:
# - tags
# script:
# - cd ${HOME}
# - PGHOST=postgres cd components/server/job-xsweet && yarn jest --testRegex test/standaloneXsweetTest.js
\ No newline at end of file
......@@ -6,6 +6,7 @@
| [![MIT license](https://img.shields.io/badge/license-MIT-e51879.svg)](https://gitlab.coko.foundation/pubsweet/pubsweet/raw/master/LICENSE) [![mattermost](https://img.shields.io/badge/mattermost_chat-coko%2Fpubsweet-blue.svg)](https://mattermost.coko.foundation/coko/channels/pubsweet) [![Commitizen friendly](https://img.shields.io/badge/commitizen-friendly-brightgreen.svg)](http://commitizen.github.io/cz-cli/) |
| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
# Overview
**PubSweet** allows you to build state-of-the-art publishing platforms.
......
......@@ -20,15 +20,17 @@ RUN wget https://gitlab.coko.foundation/XSweet/editoria_typescript/repository/ar
RUN wget https://gitlab.coko.foundation/XSweet/HTMLevator/repository/archive.zip?ref=eeef394426f7db7647d992aafdda1b677b35b7ae -O htmlevator.zip; unzip htmlevator.zip; rm htmlevator.zip

# Download Saxon (HE 9.9 — the XSLT processor invoked by execute_chain.sh).
# The duplicate 9.8 download was pre-merge residue; keeping both would
# download two distributions into the same directory for no benefit.
RUN wget "https://downloads.sourceforge.net/project/saxon/Saxon-HE/9.9/SaxonHE9-9-1-1J.zip" -O saxon.zip; unzip saxon.zip -d saxon; rm saxon.zip

COPY . .

# Put things in place
RUN ./src/move_xslts.sh

# Smoke-test the conversion: run the pipeline on the fixture document and
# check the marker text survives. The updated pipeline writes
# outputs/HTML5.html; the stale grep of outputs/16HTML5.html would fail
# the image build.
RUN mkdir _test
RUN unzip src/test.docx -d _test
RUN ./src/execute_chain.sh _test
RUN grep -q 'Testing conversion 123' _test/outputs/HTML5.html
RUN rm -rf _test
......
......@@ -29,3 +29,20 @@ Run it with: `docker run -e DATABASE_URL=yourdatabaseurl pubsweet/job-xsweet:lat
The DATABASE_URL is needed so that the Docker container knows how to connect to your database.
After the Docker container starts up, jobs from the endpoint will start to be processed.
# Testing
Testing has to be done semi-automatically for now, as GitLab CI does not allow for service-to-service communication yet (https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1041), by running:
```
cd components/server/job-xsweet && yarn test --testRegex test/standaloneXsweetTest.js
```
And in a separate window, building and starting the `job-xsweet` container like so:
```
docker build -t pubsweet/job-xsweet components/server/job-xsweet
docker run -e DATABASE_URL="postgres://yourUsername@host.docker.internal/test" pubsweet/job-xsweet
```
This will test both the GraphQL subscription and the classic long-running HTTP request path.
// PubSweet server configuration (default environment): the database
// connection is taken verbatim from the DATABASE_URL environment
// variable (e.g. postgres://user:pass@host/db).
module.exports = {
'pubsweet-server': {
db: process.env.DATABASE_URL,
},
}
const path = require('path')

// PubSweet server configuration for the test environment: a local
// `test` database, a fixed port/secret, and the pubsweet components
// (user/team models plus this component's src) to load.
module.exports = {
'pubsweet-server': {
db: {
database: 'test',
},
// Small connection pool; idle connections are reaped quickly so the
// test process can exit.
pool: { min: 0, max: 10, idleTimeoutMillis: 1000 },
port: 4000,
secret: 'test',
},
pubsweet: {
components: [
'@pubsweet/model-user',
'@pubsweet/model-team',
path.join(__dirname, '../src'),
],
},
// Permissive authorisation mode used by pubsweet-server's test helpers.
authsome: {
mode: 'pubsweet-server/test/helpers/authsome_mode',
},
}
......@@ -9,8 +9,11 @@
"author": "Adam Hyde",
"license": "MIT",
"dependencies": {
"@pubsweet/db-manager": "^3.0.15",
"@pubsweet/logger": "^0.2.29",
"express-fileupload": "v1.1.1-alpha.2",
"passport": "^0.4.0",
"waait": "^1.0.5",
"pubsweet-server": "^13.5.2",
"tmp-promise": "^2.0.0"
},
......
......@@ -11,113 +11,26 @@ DIRECTORY=$(cd `dirname $0` && pwd)
# Working directory containing the unzipped DOCX (first CLI argument).
TEMP=$1
# Locate the unpacked stylesheet distributions next to this script.
XSWEET=$(find ${DIRECTORY}/.. -maxdepth 1 -name "XSweet*" -print -quit)
TYPESCRIPT=$(find ${DIRECTORY}/.. -maxdepth 1 -name 'editoria_typescript*' -print -quit)
HTMLEVATOR=$(find ${DIRECTORY}/.. -maxdepth 1 -name 'HTMLevator*' -print -quit)
# NOTE(review): the commented duplicates below appear to be merge residue
# from a branch that disabled the typescript/HTMLevator lookups — confirm
# which side of the merge is intended before relying on these variables.
# TYPESCRIPT=$(find ${DIRECTORY}/.. -maxdepth 1 -name 'editoria_typescript*' -print -quit)
# HTMLEVATOR=$(find ${DIRECTORY}/.. -maxdepth 1 -name 'HTMLevator*' -print -quit)
SAXONDIR=$(find ${DIRECTORY}/.. -maxdepth 1 -name 'saxon' -print -quit)
echo $XSWEET
echo $TYPESCRIPT
echo $HTMLEVATOR
# echo $TYPESCRIPT
# echo $HTMLEVATOR
# Note Saxon is included with this distribution, qv for license.
saxonHE="java -jar ${SAXONDIR}/saxon9he.jar" # SaxonHE (XSLT 3.0 processor)
# EXTRACTION
EXTRACT="${XSWEET}/applications/docx-extract/docx-html-extract.xsl" # "Extraction" stylesheet
# NOTE: RUNS TABLE EXTRACTION FROM INSIDE EXTRACT
NOTES="${XSWEET}/applications/docx-extract/handle-notes.xsl" # "Refinement" stylesheets
SCRUB="${XSWEET}/applications/docx-extract/scrub.xsl"
JOIN="${XSWEET}/applications/docx-extract/join-elements.xsl"
COLLAPSEPARA="${XSWEET}/applications/docx-extract/collapse-paragraphs.xsl"
LINKS="${HTMLEVATOR}/applications/hyperlink-inferencer/hyperlink-inferencer.xsl"
PROMOTELISTS="${XSWEET}/applications/list-promote/PROMOTE-lists.xsl"
# NOTE: RUNS mark-lists, then itemize-lists
# HEADER PROMOTION
HEADERCHOOSEANDPROMOTE="${HTMLEVATOR}/applications/header-promote/header-promotion-CHOOSE.xsl"
DIGESTPARA="${HTMLEVATOR}/applications/header-promote/digest-paragraphs.xsl"
MAKEHEADERXSLT="${HTMLEVATOR}/applications/header-promote/make-header-escalator-xslt.xsl"
MATH="${XSWEET}/applications/math/xsweet_tei_omml2mml.xsl"
FINALRINSE="${XSWEET}/applications/html-polish/final-rinse.xsl"
UCPTEXT="${HTMLEVATOR}/applications/ucp-cleanup/ucp-text-macros.xsl"
UCPMAP="${HTMLEVATOR}/applications/ucp-cleanup/ucp-mappings.xsl"
# TYPESCRIPT
SPLITONBR="${TYPESCRIPT}/p-split-around-br.xsl"
EDITORIABASIC="${TYPESCRIPT}/editoria-basic.xsl"
EDITORIAREDUCE="${TYPESCRIPT}/editoria-reduce.xsl"
PIPELINE="${XSWEET}/applications/PIPELINE.xsl" # "Extraction" stylesheet
XMLTOHTML5="${XSWEET}/applications/html-polish/html5-serialize.xsl"
# INDUCESECTIONS="applications/htmlevator/applications/induce-sections/induce-sections.xsl"
# Intermediate and final outputs (serializations) are all left on the file system.
$saxonHE -xsl:$EXTRACT -s:$TEMP/word/document.xml -o:$TEMP/outputs/1EXTRACTED.xhtml
echo Made 1EXTRACTED.xhtml
$saxonHE -xsl:$NOTES -s:$TEMP/outputs/1EXTRACTED.xhtml -o:$TEMP/outputs/2NOTES.xhtml
echo Made 2NOTES.xhtml
$saxonHE -xsl:$SCRUB -s:$TEMP/outputs/2NOTES.xhtml -o:$TEMP/outputs/3SCRUBBED.xhtml
echo Made 3SCRUBBED.xhtml
$saxonHE -xsl:$JOIN -s:$TEMP/outputs/3SCRUBBED.xhtml -o:$TEMP/outputs/4JOINED.xhtml
echo Made 4JOINED.xhtml
$saxonHE -xsl:$COLLAPSEPARA -s:$TEMP/outputs/4JOINED.xhtml -o:$TEMP/outputs/5COLLAPSED.xhtml
echo Made 5COLLAPSED.xhtml
$saxonHE -xsl:$LINKS -s:$TEMP/outputs/5COLLAPSED.xhtml -o:$TEMP/outputs/6LINKS.xhtml
echo Made 6LINKS.xhtml
$saxonHE -xsl:$PROMOTELISTS -s:$TEMP/outputs/6LINKS.xhtml -o:$TEMP/outputs/7PROMOTELISTS.xhtml
echo Made 7PROMOTELISTS.xhtml
$saxonHE -xsl:$HEADERCHOOSEANDPROMOTE -s:$TEMP/outputs/7PROMOTELISTS.xhtml -o:$TEMP/outputs/8HEADERSPROMOTED.xhtml
echo Made 8HEADERSPROMOTED.xhtml
# CLASSIC HP
# $saxonHE -xsl:$DIGESTPARA -s:$TEMP/outputs/7PROMOTELISTS.xhtml -o:$TEMP/outputs/8DIGESTEDPARA.xhtml
# echo Made 8DIGESTEDPARA.xhtml
#
# $saxonHE -xsl:$MAKEHEADERXSLT -s:$TEMP/outputs/8DIGESTEDPARA.xhtml -o:$TEMP/outputs/9BESPOKEHEADERXSLT.xsl
# echo Made 9BESPOKEHEADERXSLT.xsl
# HEADERXSL="$TEMP/outputs/9BESPOKEHEADERXSLT.xsl"
#
# $saxonHE -xsl:$HEADERXSL -s:$TEMP/outputs/7PROMOTELISTS.xhtml -o:$TEMP/outputs/10CLASSICHEADERSPROMOTED.xhtml
# echo Made 10CLASSICHEADERSPROMOTED.xhtml
$saxonHE -xsl:$MATH -s:$TEMP/outputs/8HEADERSPROMOTED.xhtml -o:$TEMP/outputs/9MATH.xhtml
echo Made 9MATH.xhtml
$saxonHE -xsl:$FINALRINSE -s:$TEMP/outputs/9MATH.xhtml -o:$TEMP/outputs/10RINSED.xhtml
echo Made 10RINSED.xhtml
$saxonHE -xsl:$UCPTEXT -s:$TEMP/outputs/10RINSED.xhtml -o:$TEMP/outputs/11UCPTEXTED.xhtml
echo Made 11UCPTEXTED.xhtml
$saxonHE -xsl:$UCPMAP -s:$TEMP/outputs/11UCPTEXTED.xhtml -o:$TEMP/outputs/12UCPMAPPED.xhtml
echo Made 12UCPMAPPED.xhtml
$saxonHE -xsl:$SPLITONBR -s:$TEMP/outputs/12UCPMAPPED.xhtml -o:$TEMP/outputs/13SPLITONBR.xhtml
echo Made 13SPLITONBR.xhtml
# $saxonHE -xsl:$EDITORIANOTES -s:$TEMP/outputs/13SPLITONBR.xhtml -o:$TEMP/outputs/13EDITORIANOTES.xhtml
# echo Made 13EDITORIANOTES.xhtml
$saxonHE -xsl:$EDITORIABASIC -s:$TEMP/outputs/13SPLITONBR.xhtml -o:$TEMP/outputs/14EDITORIABASIC.xhtml
echo Made 14EDITORIABASIC.xhtml
$saxonHE -xsl:$EDITORIAREDUCE -s:$TEMP/outputs/14EDITORIABASIC.xhtml -o:$TEMP/outputs/15EDITORIAREDUCE.html
echo Made 15EDITORIAREDUCE.html
$saxonHE -xsl:$PIPELINE -s:$TEMP/word/document.xml -o:$TEMP/outputs/PIPELINED.xhtml
echo Made PIPELINED.xhtml
$saxonHE -xsl:$XMLTOHTML5 -s:$TEMP/outputs/15EDITORIAREDUCE.html -o:$TEMP/outputs/16HTML5.html
echo Made 16HTML5.html
\ No newline at end of file
$saxonHE -xsl:$XMLTOHTML5 -s:$TEMP/outputs/PIPELINED.xhtml -o:$TEMP/outputs/HTML5.html
echo Made HTML5.html
\ No newline at end of file
const {
  jobs: { connectToJobQueue },
} = require('pubsweet-server')
const { getPubsub } = require('pubsweet-server/src/graphql/pubsub')
const { db } = require('@pubsweet/db-manager')
const logger = require('@pubsweet/logger')

// Prefix for the per-user, per-job pubsub channels used below.
const DOCX_TO_HTML = 'DOCX_TO_HTML'

const crypto = require('crypto')
const waait = require('waait')

const resolvers = {
  Mutation: {
    // Accept a DOCX upload, enqueue an XSweet conversion job, and stream
    // progress updates over the `DOCX_TO_HTML.<user>.<jobId>` channel.
    createDocxToHTMLJob: async (_, { file, fileSize }, context) => {
      const jobQueue = await connectToJobQueue()
      const pubsub = await getPubsub()
      const { createReadStream, filename } = await file
      const stream = await createReadStream()
      // Short random id that makes this job's pubsub channel unique.
      const jobId = crypto.randomBytes(3).toString('hex')
      const pubsubChannel = `${DOCX_TO_HTML}.${context.user}.${jobId}`

      // A reference to actual pgboss job row
      let queueJobId

      pubsub.subscribe(pubsubChannel, async ({ docxToHTMLJob: { status } }) => {
        logger.info(pubsubChannel, status)
        if (status === 'Conversion complete') {
          // Give the job runner a moment to persist its response before
          // clients are told the result can be queried.
          await waait(1000)
          pubsub.publish(pubsubChannel, {
            docxToHTMLJob: {
              status: 'Done',
              id: queueJobId,
            },
          })
        }
      })

      pubsub.publish(pubsubChannel, {
        docxToHTMLJob: {
          // Interpolate the uploaded file's name into the status message
          // (the previous `$(unknown)` emitted that literal text).
          status: `Uploading file ${filename}`,
          id: jobId,
        },
      })

      // Buffer the upload stream fully, then enqueue the conversion job
      // with the DOCX base64-encoded in the job payload.
      const chunks = []
      stream.on('data', chunk => {
        chunks.push(chunk)
      })
      stream.on('end', () => {
        pubsub.publish(pubsubChannel, {
          docxToHTMLJob: {
            status: 'File uploaded and conversion job created',
            id: jobId,
          },
        })
        const result = Buffer.concat(chunks)
        jobQueue
          .publish(`xsweetGraphQL`, {
            docx: {
              name: filename,
              data: result.toString('base64'),
            },
            pubsubChannel,
          })
          .then(id => (queueJobId = id))
      })
      stream.on('error', e => {
        // Publish in the same { docxToHTMLJob: { status } } shape as every
        // other update — a bare { status } payload would never reach
        // DocxToHTMLJob subscribers, and status must be a String.
        pubsub.publish(pubsubChannel, {
          docxToHTMLJob: {
            status: String(e),
            id: jobId,
          },
        })
      })

      // The mutation resolves immediately; progress arrives via the
      // subscription.
      return {
        status: 'Uploading file',
        id: jobId,
      }
    },
  },
  Query: {
    // Look up a finished job's HTML by the short job id stored in the
    // pgboss request payload.
    docxToHTMLJob: async (_, { jobId }, context) => {
      const job = await db('pgboss.job').whereRaw(
        "data->'request'->>'id' = ?",
        [jobId],
      )
      // NOTE(review): assumes a matching row exists and already carries a
      // response — confirm callers only query after the 'Done' status.
      return {
        status: 'Final',
        html: job[0].data.response.html,
      }
    },
  },
  Subscription: {
    docxToHTMLJob: {
      // Stream this job's status updates to its owner.
      subscribe: async (_, { jobId }, context) => {
        const pubsub = await getPubsub()
        return pubsub.asyncIterator(`${DOCX_TO_HTML}.${context.user}.${jobId}`)
      },
    },
  },
}

const typeDefs = `
extend type Mutation {
  # Upload a file, store it on the server and return the file url
  createDocxToHTMLJob(file: Upload!, fileSize: Int): DocxToHTMLJob
}
extend type Subscription {
  docxToHTMLJob(jobId: String!): DocxToHTMLJob!
}
extend type Query {
  docxToHTMLJob(jobId: String!): DocxToHTMLJob!
}
type DocxToHTMLJob {
  id: String
  status: String!
  html: String
}
`

module.exports = { typeDefs, resolvers }
// TODO: No support for job runners in the core. Must run standalone.
module.exports = {
// Mount this component's HTTP conversion endpoint on the PubSweet app.
server: () => app => require('./endpoint')(app),
// TODO: No support for job components in the core. Must run standalone.
// Also expose the GraphQL typeDefs/resolvers from ./graphql.
...require('./graphql'),
}
#!/bin/bash
# Collect the downloaded XSweet, editoria_typescript and HTMLevator
# distributions (unzipped under commit-hash-suffixed names next to this
# script) and move them to the fixed paths the conversion chain expects.

# Abort on the first failing command instead of continuing with missing
# inputs.
set -e

# Directory of script
DIRECTORY=$(cd "$(dirname "$0")" && pwd)

# Quote all expansions so paths containing spaces survive; the unused
# TEMP/SAXONDIR assignments from the original were dropped.
XSWEET=$(find "${DIRECTORY}/.." -maxdepth 1 -name "XSweet*" -print -quit)
TYPESCRIPT=$(find "${DIRECTORY}/.." -maxdepth 1 -name 'editoria_typescript*' -print -quit)
HTMLEVATOR=$(find "${DIRECTORY}/.." -maxdepth 1 -name 'HTMLevator*' -print -quit)

echo "$XSWEET"
echo "$TYPESCRIPT"
echo "$HTMLEVATOR"

mv "$XSWEET" XSweet
mv "$TYPESCRIPT" XSweet/applications/typescript
mv "$HTMLEVATOR" XSweet/applications/htmlevator
const tmp = require('tmp-promise')
const fs = require('fs')
const path = require('path')
// Single destructuring only: the duplicated `const { execSync }` line was
// merge residue and redeclared execSync, which is a SyntaxError.
const { execSync, execFileSync } = require('child_process')
const logger = require('@pubsweet/logger')
const { pubsubManager } = require('pubsweet-server')

// encode file to base64
// (parameter renamed so it no longer shadows the `path` module)
const base64EncodeFile = filePath => fs.readFileSync(filePath).toString('base64')
......@@ -24,42 +26,85 @@ const imagesToBase64 = html => {
return html
}
const xsweetHandler = async job => {
// console.log('processing job', job.data.docx)
const buf = Buffer.from(job.data.docx.data, 'base64')
const { path: tmpDir, cleanup } = await tmp.dir({
prefix: '_conversion-',
unsafeCleanup: true,
dir: process.cwd(),
})
// console.log('Write the buffer to a temporary file')
fs.writeFileSync(path.join(tmpDir, job.data.docx.name), buf)
// console.log('Unzip that docx')
execSync(`unzip -o ${tmpDir}/${job.data.docx.name} -d ${tmpDir}`)
// console.log('Convert using a series of Saxon/XSLT steps')
execSync(`bash ${path.resolve(__dirname, 'execute_chain.sh')} ${tmpDir}`)
// console.log('Return the HTML5 output')
const html = fs.readFileSync(
path.join(tmpDir, 'outputs', '16HTML5.html'),
'utf8',
)
let processedHtml
// Curried pg-boss handler. `enablePubsub` decides whether progress is
// streamed over the job's pubsub channel (GraphQL path) or suppressed
// (classic HTTP path). Resolves to { html } on success, { html: null }
// on failure.
const xsweetHandler = enablePubsub => async job => {
  try {
    let pubsub
    if (enablePubsub) {
      logger.info(job.data.pubsubChannel, 'has started.')
      pubsub = await pubsubManager.getPubsub()
      pubsub.publish(job.data.pubsubChannel, {
        docxToHTMLJob: {
          status: 'DOCX to HTML conversion started',
        },
      })
    }

    // Recreate the uploaded DOCX from its base64 payload inside a
    // throwaway working directory (removed recursively on cleanup).
    const buf = Buffer.from(job.data.docx.data, 'base64')
    const { path: tmpDir, cleanup } = await tmp.dir({
      prefix: '_conversion-',
      unsafeCleanup: true,
      dir: process.cwd(),
    })
    fs.writeFileSync(path.join(tmpDir, job.data.docx.name), buf)

    if (enablePubsub) {
      pubsub.publish(job.data.pubsubChannel, {
        docxToHTMLJob: {
          status: 'Unzipping DOCX document',
        },
      })
    }

    // execFileSync (not execSync) so the user-supplied docx name is passed
    // as an argv entry and never interpreted by a shell.
    execFileSync('unzip', [
      '-o',
      `${tmpDir}/${job.data.docx.name}`,
      '-d',
      tmpDir,
    ])

    if (enablePubsub) {
      pubsub.publish(job.data.pubsubChannel, {
        docxToHTMLJob: { status: 'Converting DOCX using XSweet' },
      })
    }

    // Run the Saxon/XSLT pipeline; it writes outputs/HTML5.html.
    execSync(`bash ${path.resolve(__dirname, 'execute_chain.sh')} ${tmpDir}`)

    const html = fs.readFileSync(
      path.join(tmpDir, 'outputs', 'HTML5.html'),
      'utf8',
    )

    let processedHtml
    try {
      processedHtml = imagesToBase64(html)
    } catch (e) {
      // Image inlining is best-effort; fall back to the raw HTML.
      processedHtml = html
    }

    await cleanup()

    if (enablePubsub) {
      logger.info(job.data.pubsubChannel, 'has completed.')
      pubsub.publish(job.data.pubsubChannel, {
        docxToHTMLJob: { status: 'Conversion complete' },
      })
    }

    return { html: processedHtml }
  } catch (e) {
    // Removed the stray `processedHtml = html` here: `html` is scoped to
    // the try block, so it raised a ReferenceError that masked the real
    // failure. Dead post-catch statements were dropped as well (both
    // branches already return).
    // eslint-disable-next-line
    console.log(e)
    if (enablePubsub) {
      const pubsub = await pubsubManager.getPubsub()
      pubsub.publish(job.data.pubsubChannel, {
        docxToHTMLJob: { status: 'Conversion error' },
      })
    }
    return { html: null }
  }
}
const handleJobs = async () => {
......@@ -70,7 +115,8 @@ const handleJobs = async () => {
const jobQueue = await connectToJobQueue()
// Subscribe to the job queue with an async handler
await jobQueue.subscribe('xsweet-*', xsweetHandler)
await jobQueue.subscribe('xsweet-*', xsweetHandler(false))
await jobQueue.subscribe('xsweetGraphQL', xsweetHandler(true))
}
handleJobs()
// `path` is required first so NODE_CONFIG_DIR can point config lookups at
// this component's config directory before pubsweet-server loads. The
// second `const path = require('path')` further down was merge residue
// and a redeclaration SyntaxError; it has been removed.
const path = require('path')
process.env.NODE_CONFIG_DIR = path.resolve(__dirname, '..', 'config')
// const log = require('why-is-node-running')

// subscriptions-transport-ws expects a browser-style global WebSocket.
const WebSocket = require('ws')
Object.assign(global, {
  WebSocket,
})

const { destroy } = require('pubsweet-server/src/graphql/pubsub')
const authentication = require('pubsweet-server/src/authentication')
const { startServer } = require('pubsweet-server')
const fs = require('fs')
const { SubscriptionClient } = require('subscriptions-transport-ws')
const superagent = require('superagent')
const wait = require('waait')

// Conversions are slow; allow each test up to a minute.
jest.setTimeout(60000)
describe('XSweet job', () => {
let server
let token
beforeAll(async () => {
server = await startServer()
token = authentication.token.create({ id: 1, username: 'test' })
})
afterAll(async done => {
await destroy()
await server.close()
setImmediate(() => server.emit('close'))
await wait(500)
done()
})
describe('Endpoint', () => {
// Classic long-running HTTP path: POST the fixture DOCX and expect the
// converted HTML (containing the fixture's marker text) in the response.
it('returns html', async () => {
const file = fs.createReadStream(path.join(__dirname, '../src/test.docx'))
const { text } = await superagent
.post('http://localhost:4000/convertDocxToHTML')
.attach('docx', file)
expect(text).toMatch(/Testing conversion 123/)
})
})
describe('GraphQL subscriptions', () => {
// let user
let client
beforeAll(() => {
client = new SubscriptionClient(`ws://localhost:4000/subscriptions`, {
connectionParams: {
authToken: token,
},
})
})
afterAll(() => {
client.client.close()
})
it('can process it', async () => {
const { body } = await superagent
.post('http://localhost:4000/graphql')
.field(
'operations',
JSON.stringify({
operationName: null,
variables: { file: null },
query: `mutation createDocxToHTMLJob($file: Upload!) {
createDocxToHTMLJob(file: $file) {
status
id
}
}`,
}),
)
.field('map', JSON.stringify({ '0': ['variables.file'] }))
.attach(
'0',
fs.readFileSync(path.join(__dirname, '../src/test.docx')),
'test.docx',
)
.set('Authorization', `Bearer ${token}`)
expect(body.data.createDocxToHTMLJob.status).toBe('Uploading file')
const subscriptionPromise = new Promise((resolve, reject) => {
client
.request({
query: `subscription docxToHTMLJob($jobId: String!) {
docxToHTMLJob(jobId: $jobId) {
id
status
html
}
}
`,
variables: {
jobId: body.data.createDocxToHTMLJob.id,
},
})
.subscribe({
next: async res => {
if (res.data.docxToHTMLJob.status === 'Done') {
const { body } = await superagent
.post('http://localhost:4000/graphql')
.set('Content-Type', 'application/json')
.set('Authorization', `Bearer ${token}`)
.send({
variables: { jobId: res.data.docxToHTMLJob.id },
query: `query docxToHTMLJob($jobId: String!) {
docxToHTMLJob(jobId: $jobId) {
status
html
}
}`,
})
resolve(body.data.docxToHTMLJob.html)
}
return true