adapt-authoring-integration-tests 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/releases.yml +32 -0
- package/.github/workflows/standardjs.yml +12 -0
- package/README.md +73 -0
- package/adapt-authoring.json +6 -0
- package/bin/run.js +83 -0
- package/fixtures/.gitkeep +0 -0
- package/fixtures/manifest.example.json +3 -0
- package/lib/app.js +49 -0
- package/lib/db.js +28 -0
- package/lib/fixtures.js +136 -0
- package/package.json +36 -0
- package/tests/adaptframework-build.spec.js +120 -0
- package/tests/adaptframework-import.spec.js +193 -0
- package/tests/api.spec.js +47 -0
- package/tests/auth.spec.js +306 -0
- package/tests/content.spec.js +179 -0
- package/tests/lib.spec.js +43 -0
- package/tests/mongodb.spec.js +110 -0
- package/tests/roles.spec.js +162 -0
- package/tests/users.spec.js +113 -0
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
name: Release
on:
  push:
    branches:
      - master

jobs:
  release:
    name: Release
    runs-on: ubuntu-latest
    permissions:
      contents: write # to be able to publish a GitHub release
      issues: write # to be able to comment on released issues
      pull-requests: write # to be able to comment on released pull requests
      id-token: write # to enable use of OIDC for trusted publishing and npm provenance
    steps:
      - name: Checkout
        # v4 — v3 runs on the deprecated Node 16 action runtime
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # semantic-release needs full history to analyse commits
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'
      - name: Update npm
        run: npm install -g npm@latest
      - name: Install dependencies
        run: npm install
      - name: Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: npx semantic-release
|
package/README.md
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
# Adapt Authoring Integration Tests
|
|
2
|
+
|
|
3
|
+
Integration test suite for the Adapt authoring tool. Tests the full application with a real database, covering import, build, and export workflows.
|
|
4
|
+
|
|
5
|
+
## Prerequisites
|
|
6
|
+
|
|
7
|
+
- Node.js 24+
|
|
8
|
+
- MongoDB 8.0+
|
|
9
|
+
- The adapt-authoring app with dependencies installed
|
|
10
|
+
|
|
11
|
+
## Setup
|
|
12
|
+
|
|
13
|
+
Create a fixtures directory with a `manifest.json` and your test fixture files:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
mkdir /path/to/fixtures
|
|
17
|
+
echo '{ "course-export": "course-export.zip" }' > /path/to/fixtures/manifest.json
|
|
18
|
+
cp /path/to/your-course-export.zip /path/to/fixtures/course-export.zip
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
See `fixtures/manifest.example.json` for the expected format.
|
|
22
|
+
|
|
23
|
+
## Running tests
|
|
24
|
+
|
|
25
|
+
From the **adapt-authoring app directory**:
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
# Set required environment variables
|
|
29
|
+
export ADAPT_AUTHORING_AUTH__tokenSecret='testsecret'
|
|
30
|
+
export ADAPT_AUTHORING_MONGODB__connectionUri='mongodb://0.0.0.0/adapt-authoring-test'
|
|
31
|
+
export ADAPT_AUTHORING_SERVER__host='localhost'
|
|
32
|
+
export ADAPT_AUTHORING_SERVER__port='5678'
|
|
33
|
+
export ADAPT_AUTHORING_SERVER__url='http://localhost:5678'
|
|
34
|
+
export ADAPT_AUTHORING_SESSIONS__secret='testsessionssecret'
|
|
35
|
+
|
|
36
|
+
# Run all integration tests
|
|
37
|
+
FIXTURES_DIR=/path/to/fixtures npx at-integration-test
|
|
38
|
+
|
|
39
|
+
# Run only import tests
|
|
40
|
+
FIXTURES_DIR=/path/to/fixtures npx at-integration-test --import-only
|
|
41
|
+
|
|
42
|
+
# Run only build tests
|
|
43
|
+
FIXTURES_DIR=/path/to/fixtures npx at-integration-test --build-only
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## Custom tests (e.g. client testing)
|
|
47
|
+
|
|
48
|
+
Point `CUSTOM_DIR` to a directory containing custom `fixtures/` and `tests/`:
|
|
49
|
+
|
|
50
|
+
```
|
|
51
|
+
my-client-tests/
|
|
52
|
+
fixtures/
|
|
53
|
+
manifest.json
|
|
54
|
+
client-course.zip
|
|
55
|
+
tests/
|
|
56
|
+
client-specific.spec.js
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
CUSTOM_DIR=/path/to/my-client-tests npx at-integration-test
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
Custom fixtures are merged with the standard fixtures (custom takes priority on key collisions). Custom tests are run alongside the standard tests.
|
|
64
|
+
|
|
65
|
+
## CI
|
|
66
|
+
|
|
67
|
+
The GitHub Actions workflow runs weekly and can be triggered manually via `workflow_dispatch`. It:
|
|
68
|
+
|
|
69
|
+
1. Checks out both this repo and the main adapt-authoring repo
|
|
70
|
+
2. Downloads test fixtures from a separate repository
|
|
71
|
+
3. Starts MongoDB via `supercharge/mongodb-github-action`
|
|
72
|
+
4. Installs app dependencies
|
|
73
|
+
5. Runs all integration tests
|
package/bin/run.js
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Integration test runner.
 *
 * Must be run from the adapt-authoring app directory (where node_modules are installed).
 *
 * Usage:
 *   npx at-integration-test                  # run all tests
 *   npx at-integration-test auth             # run auth.spec.js
 *   npx at-integration-test mongodb content  # run mongodb.spec.js and content.spec.js
 *   CUSTOM_DIR=/path/to/custom npx at-integration-test
 *
 * Environment variables:
 *   CUSTOM_DIR - Path to a directory containing additional fixtures/ and/or tests/
 *                Custom fixtures override built-in fixtures when keys collide.
 */

import { execFileSync } from 'child_process'
import fs from 'fs'
import os from 'os'
import path from 'path'
import { fileURLToPath, pathToFileURL } from 'url'
import { dropTestDb } from '../lib/db.js'

const ROOT = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '..')
const testsDir = path.join(ROOT, 'tests')
// path.resolve already returns an absolute input unchanged, so no isAbsolute check is needed
const customDir = process.env.CUSTOM_DIR ? path.resolve(process.env.CUSTOM_DIR) : undefined

// Collect test file paths
const testFiles = []
const args = process.argv.slice(2)

if (args.length > 0) {
  // Each positional arg names a built-in spec: <name>.spec.js under tests/
  for (const name of args) {
    const specFile = path.join(testsDir, `${name}.spec.js`)
    if (!fs.existsSync(specFile)) {
      console.error(`Test not found: ${name} (expected ${specFile})`)
      process.exit(1)
    }
    testFiles.push(specFile)
  }
} else {
  const files = fs.readdirSync(testsDir).filter(f => f.endsWith('.spec.js')).sort()
  testFiles.push(...files.map(f => path.join(testsDir, f)))
}

// Add custom tests if CUSTOM_DIR is set
const customTestFiles = []
if (customDir) {
  const customTestsDir = path.join(customDir, 'tests')
  if (fs.existsSync(customTestsDir)) {
    const customFiles = fs.readdirSync(customTestsDir).filter(f => f.endsWith('.spec.js')).sort()
    customTestFiles.push(...customFiles.map(f => path.join(customTestsDir, f)))
    testFiles.push(...customTestFiles)
  }
}

// Drop the test database to ensure a clean state before the app boots.
// Stale records (e.g. contentplugins from a previous run) can cause
// initPlugins to look for plugin files that no longer exist.
await dropTestDb()

// Generate a single entry file that imports all specs so the app boots once.
// Use file:// URLs so the generated import specifiers also work on Windows,
// where raw backslash paths are not valid ES-module specifiers.
const imports = testFiles.map(f => `import '${pathToFileURL(f).href}'`).join('\n')
const entryFile = path.join(os.tmpdir(), `aat-test-entry-${Date.now()}.js`)
fs.writeFileSync(entryFile, imports + '\n')

console.log(`Tests:\n${testFiles.map(f => ` ${path.basename(f)}`).join('\n')}`)
if (customTestFiles.length) {
  console.log(`Custom scripts:\n${customTestFiles.map(f => ` ${path.basename(f)}`).join('\n')}`)
}
console.log()

let exitCode = 0
try {
  // execFileSync avoids shell quoting issues with the temp-file path
  execFileSync(process.execPath, ['--test', '--test-force-exit', entryFile], { stdio: 'inherit', env: process.env })
} catch (err) {
  // node --test exits non-zero when tests fail; propagate that exit code
  // instead of surfacing an unhandled throw with exit code 1 and a stack trace
  exitCode = typeof err.status === 'number' ? err.status : 1
} finally {
  try { fs.unlinkSync(entryFile) } catch {}
}
process.exit(exitCode)
|
|
File without changes
|
package/lib/app.js
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { App } from 'adapt-authoring-core'

// Cached boot promise shared by all callers of getApp()
let app

/**
 * Boots the Adapt authoring app and returns the App instance.
 * The boot promise (not the resolved instance) is cached, so concurrent
 * callers share a single boot instead of each triggering onReady() before
 * the first call resolves.
 * @returns {Promise<App>}
 */
export async function getApp () {
  if (!app) {
    // Default to the 'testing' environment unless the caller overrides it
    process.env.NODE_ENV = process.env.NODE_ENV || 'testing'
    app = App.instance.onReady()
  }
  return app
}
|
|
16
|
+
|
|
17
|
+
/**
 * Waits for a named module to be ready and returns it.
 * @param {string} name - Module name (e.g. 'adaptframework', 'content')
 * @returns {Promise<Object>}
 */
export async function getModule (name) {
  // Boot (or reuse) the app, then hand back the requested module
  return (await getApp()).waitForModule(name)
}
|
|
26
|
+
|
|
27
|
+
/**
 * Default collections cleaned between test runs.
 * Must include 'contentplugins' to avoid stale plugin records causing
 * MISSING_SCHEMA errors on subsequent runs.
 * @type {string[]}
 */
export const DEFAULT_CLEAN_COLLECTIONS = ['content', 'assets', 'courseassets', 'tags', 'adaptbuilds', 'contentplugins']

/**
 * Cleans up test data from the database.
 * Call this in after() hooks to leave the DB clean.
 * @param {string[]} collections - Collection names to clear
 */
export async function cleanDb (collections = DEFAULT_CLEAN_COLLECTIONS) {
  const mongodb = await getModule('mongodb')
  // Best-effort clear of a single collection; a collection that doesn't
  // exist yet throws, which is fine to ignore.
  const clearCollection = async (collectionName) => {
    try {
      await mongodb.getCollection(collectionName).deleteMany({})
    } catch {
      // collection may not exist yet, that's fine
    }
  }
  for (const collectionName of collections) {
    await clearCollection(collectionName)
  }
}
|
package/lib/db.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import path from 'path'

/**
 * Drops the test database to ensure a clean state before the app boots.
 *
 * Stale records (e.g. contentplugins from a previous run) can cause
 * initPlugins to look for plugin files that no longer exist.
 *
 * @param {string} [cwd=process.cwd()] - Working directory to resolve config from
 * @returns {Promise<boolean>} true if the database was dropped, false otherwise
 */
export async function dropTestDb (cwd = process.cwd()) {
  let client
  try {
    const configPath = path.resolve(cwd, 'conf', `${process.env.NODE_ENV || 'testing'}.config.js`)
    const config = (await import(configPath)).default
    const uri = config['adapt-authoring-mongodb']?.connectionUri
    if (!uri) return false
    const { MongoClient } = await import('mongodb')
    client = new MongoClient(uri)
    await client.connect()
    await client.db().dropDatabase()
    return true
  } catch (e) {
    // not fatal – the DB may not exist yet on first run
    return false
  } finally {
    // Always release the connection, even if dropDatabase() throws;
    // previously a failure after connect() leaked the client.
    await client?.close().catch(() => {})
  }
}
|
package/lib/fixtures.js
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import fs from 'fs/promises'
import os from 'os'
import path from 'path'
import { fileURLToPath } from 'url'

// Built-in fixtures live alongside this package, one level up from lib/
const FIXTURES_DIR = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '..', 'fixtures')

// Module-level caches, populated lazily by getManifest()/getTempDir()
let manifest
let resolvedDirs
let tempDir

/**
 * Returns the custom fixtures directory path (if CUSTOM_DIR is set).
 * @returns {string|undefined}
 */
function getCustomFixturesDir () {
  const base = process.env.CUSTOM_DIR
  // Preserve the falsy value itself (undefined or '') when unset
  if (!base) return base
  return path.join(base, 'fixtures')
}
|
|
19
|
+
|
|
20
|
+
/**
 * Reads a manifest.json, returning null if not found.
 * @param {string} dir - Directory containing the manifest
 * @returns {Promise<Object|null>}
 */
async function readManifest (dir) {
  const manifestPath = path.join(dir, 'manifest.json')
  let raw
  try {
    raw = await fs.readFile(manifestPath, 'utf8')
  } catch (e) {
    // A missing manifest is an expected condition; anything else is a real error
    if (e.code === 'ENOENT') return null
    throw e
  }
  return JSON.parse(raw)
}
|
|
33
|
+
|
|
34
|
+
/**
 * Loads and caches the merged manifest from the built-in fixtures directory
 * and optional custom directory (CUSTOM_DIR/fixtures/).
 * Custom fixtures override built-in fixtures when keys collide.
 * @returns {Promise<Object>}
 */
export async function getManifest () {
  if (manifest) return manifest

  const customDir = getCustomFixturesDir()
  const standard = await readManifest(FIXTURES_DIR)
  const custom = customDir ? await readManifest(customDir) : null

  // No manifest anywhere — fail loudly with setup instructions
  if (!standard && !custom) {
    throw new Error([
      `No fixtures manifest found at ${path.join(FIXTURES_DIR, 'manifest.json')}`,
      '',
      'To set up fixtures:',
      ' 1. Create a manifest.json in the fixtures directory',
      ' 2. Map fixture names to files, e.g.: { "course-export": "course-export.zip" }',
      ' 3. Place the fixture files alongside the manifest',
      '',
      'To provide custom fixtures:',
      ' CUSTOM_DIR=/path/to/custom npx at-integration-test',
      ' (expects custom/fixtures/manifest.json)'
    ].join('\n'))
  }

  manifest = {}
  resolvedDirs = {}

  // Apply standard entries first, then custom, so custom wins on key collisions
  for (const [dir, entries] of [[FIXTURES_DIR, standard], [customDir, custom]]) {
    if (!entries) continue
    for (const [key, file] of Object.entries(entries)) {
      manifest[key] = file
      resolvedDirs[key] = dir
    }
  }

  return manifest
}
|
|
82
|
+
|
|
83
|
+
/**
 * Returns a temp directory for fixture copies, creating it on first call.
 * @returns {Promise<string>}
 */
async function getTempDir () {
  if (tempDir) return tempDir
  tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'aat-fixtures-'))
  return tempDir
}
|
|
93
|
+
|
|
94
|
+
/**
 * Resolves a fixture key to an absolute file path.
 * Copies the fixture to a temp directory so the original is preserved
 * (the import process may consume/delete the source file).
 * @param {string} key - Logical fixture name from manifest (e.g. "course-export")
 * @returns {Promise<string>} Absolute path to the copied fixture file
 * @throws {Error} If the key is not found in the manifest or the file doesn't exist
 */
export async function getFixture (key) {
  const m = await getManifest()
  if (!m[key]) {
    throw new Error(`Fixture "${key}" not found in manifest. Available: ${Object.keys(m).join(', ')}`)
  }
  const fixturePath = path.join(resolvedDirs[key], m[key])
  try {
    await fs.access(fixturePath)
  } catch {
    throw new Error(`Fixture file not found: ${fixturePath}`)
  }
  const tmp = await getTempDir()
  // mkdtemp guarantees a unique directory per call; the previous
  // `${key}-${Date.now()}-...` naming could collide (and silently share a
  // copy) when the same fixture was fetched twice within one millisecond.
  const copyDir = await fs.mkdtemp(path.join(tmp, `${key}-`))
  const destPath = path.join(copyDir, m[key])
  await fs.copyFile(fixturePath, destPath)
  return destPath
}
|
|
118
|
+
|
|
119
|
+
/**
 * Resets the cached manifest (useful if switching fixtures mid-test).
 * The next getManifest() call re-reads and re-merges from disk.
 */
export function resetManifest () {
  resolvedDirs = undefined
  manifest = undefined
}
|
|
126
|
+
|
|
127
|
+
/**
 * Removes the temp directory used for fixture copies.
 * Call in a global teardown or after() hook if desired.
 */
export async function cleanupFixtures () {
  if (!tempDir) return
  await fs.rm(tempDir, { recursive: true, force: true })
  tempDir = undefined
}
|
package/package.json
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "adapt-authoring-integration-tests",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Integration test suite for the Adapt authoring tool",
|
|
5
|
+
"repository": "github:adapt-security/adapt-authoring-integration-tests",
|
|
6
|
+
"license": "GPL-3.0",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"bin": {
|
|
9
|
+
"at-integration-test": "./bin/run.js"
|
|
10
|
+
},
|
|
11
|
+
"devDependencies": {
|
|
12
|
+
"@semantic-release/git": "^10.0.1",
|
|
13
|
+
"conventional-changelog-eslint": "^6.0.0",
|
|
14
|
+
"semantic-release": "^25.0.2",
|
|
15
|
+
"standard": "^17.1.0"
|
|
16
|
+
},
|
|
17
|
+
"release": {
|
|
18
|
+
"plugins": [
|
|
19
|
+
[
|
|
20
|
+
"@semantic-release/commit-analyzer",
|
|
21
|
+
{
|
|
22
|
+
"preset": "eslint"
|
|
23
|
+
}
|
|
24
|
+
],
|
|
25
|
+
[
|
|
26
|
+
"@semantic-release/release-notes-generator",
|
|
27
|
+
{
|
|
28
|
+
"preset": "eslint"
|
|
29
|
+
}
|
|
30
|
+
],
|
|
31
|
+
"@semantic-release/npm",
|
|
32
|
+
"@semantic-release/github",
|
|
33
|
+
"@semantic-release/git"
|
|
34
|
+
]
|
|
35
|
+
}
|
|
36
|
+
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { describe, it, before, after } from 'node:test'
import assert from 'node:assert/strict'
import fs from 'fs/promises'
import { getApp, getModule, cleanDb } from '../lib/app.js'
import { getFixture } from '../lib/fixtures.js'

// Shared across all nested describes: the adaptframework module instance
// and the id of the course imported once in before().
let framework
let courseId

describe('AdaptFramework build', () => {
  before(async () => {
    // Boot the app and wait for the modules these tests exercise
    await getApp()
    framework = await getModule('adaptframework')
    await getModule('content')

    // Import a course to use as build input
    const fixturePath = await getFixture('course-export')
    const importer = await framework.importCourse({
      importPath: fixturePath,
      userId: '000000000000000000000000',
      tags: [],
      importContent: true,
      importPlugins: true,
      migrateContent: true,
      updatePlugins: false,
      removeSource: false
    })
    courseId = importer.summary.courseId.toString()
  })

  after(async () => {
    // Clear imported content/builds so the next suite starts clean
    await cleanDb()
  })

  describe('Export', () => {
    // NOTE(review): the it() blocks below are order-dependent — the first
    // one populates buildResult for the later assertions.
    let buildResult

    it('should export a course without errors', async () => {
      buildResult = await framework.buildCourse({
        action: 'export',
        courseId,
        userId: '000000000000000000000000'
      })
      assert.ok(buildResult, 'build should return a result')
      assert.ok(buildResult.buildData, 'result should include buildData')
    })

    it('should have created a build record', async () => {
      const mongodb = await getModule('mongodb')
      // buildData._id is passed through as-is; presumably already the driver's
      // id type rather than a string — TODO confirm against the mongodb module
      const [record] = await mongodb.find('adaptbuilds', { _id: buildResult.buildData._id })
      assert.ok(record, 'build record should exist in database')
      assert.equal(record.action, 'export')
      assert.equal(record.courseId.toString(), courseId)
    })

    it('should have created an output file', async () => {
      const location = buildResult.buildData.location
      assert.ok(location, 'buildData should have a location')
      const stat = await fs.stat(location)
      assert.ok(stat.size > 0, 'output file should not be empty')
    })

    it('should have created a zip with substantial content', async () => {
      // 1000 bytes is a heuristic lower bound for a non-trivial export zip
      const stat = await fs.stat(buildResult.buildData.location)
      assert.ok(stat.size > 1000, 'export zip should have substantial content')
    })
  })

  describe('Preview', () => {
    // Order-dependent: the first it() populates buildResult
    let buildResult

    it('should create a preview build without errors', async () => {
      buildResult = await framework.buildCourse({
        action: 'preview',
        courseId,
        userId: '000000000000000000000000'
      })
      assert.ok(buildResult, 'build should return a result')
      assert.ok(buildResult.buildData, 'result should include buildData')
      assert.equal(buildResult.isPreview, true, 'should be marked as preview')
    })

    it('should have created the build output directory', async () => {
      // Unlike export/publish, a preview build's location is a directory
      const location = buildResult.buildData.location
      assert.ok(location, 'buildData should have a location')
      const stat = await fs.stat(location)
      assert.ok(stat.isDirectory(), 'preview output should be a directory')
    })

    it('should contain index.html', async () => {
      const indexPath = `${buildResult.buildData.location}/index.html`
      const stat = await fs.stat(indexPath)
      assert.ok(stat.size > 0, 'index.html should exist and not be empty')
    })
  })

  describe('Publish', () => {
    // Order-dependent: the first it() populates buildResult
    let buildResult

    it('should publish a course without errors', async () => {
      buildResult = await framework.buildCourse({
        action: 'publish',
        courseId,
        userId: '000000000000000000000000'
      })
      assert.ok(buildResult, 'build should return a result')
      assert.ok(buildResult.buildData, 'result should include buildData')
    })

    it('should have created a zip file', async () => {
      const location = buildResult.buildData.location
      const stat = await fs.stat(location)
      assert.ok(stat.size > 1000, 'publish zip should have substantial content')
    })

    it('should have recorded build versions', async () => {
      assert.ok(buildResult.buildData.versions, 'buildData should include versions')
    })
  })
})
|