@exodus/test 1.0.0-rc.46 → 1.0.0-rc.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/jest.js +4 -0
- package/bin/reporter.js +183 -0
- package/bundler/babel-worker.cjs +62 -0
- package/bundler/bundle.js +292 -0
- package/bundler/modules/ansi-styles.cjs +49 -0
- package/bundler/modules/assert-strict.cjs +1 -0
- package/bundler/modules/child_process.cjs +10 -0
- package/bundler/modules/crypto.cjs +5 -0
- package/bundler/modules/empty/function-throw.cjs +4 -0
- package/bundler/modules/empty/module-throw.cjs +1 -0
- package/bundler/modules/fs-promises.cjs +1 -0
- package/bundler/modules/fs.cjs +88 -0
- package/bundler/modules/globals.cjs +185 -0
- package/bundler/modules/http.cjs +119 -0
- package/bundler/modules/https.cjs +11 -0
- package/bundler/modules/jest-message-util.js +5 -0
- package/bundler/modules/jest-util.js +22 -0
- package/bundler/modules/node-buffer.cjs +3 -0
- package/bundler/modules/util-format.cjs +41 -0
- package/bundler/modules/ws.cjs +20 -0
- package/loaders/babel.cjs +8 -0
- package/loaders/typescript.js +3 -0
- package/loaders/typescript.loader.js +24 -0
- package/package.json +42 -2
- package/src/dark.cjs +145 -0
- package/src/engine.js +22 -0
- package/src/engine.node.cjs +41 -0
- package/src/engine.pure.cjs +484 -0
- package/src/engine.select.cjs +5 -0
- package/src/jest.config.fs.js +54 -0
- package/src/jest.config.js +134 -0
- package/src/jest.environment.js +33 -0
- package/src/jest.fn.js +167 -0
- package/src/jest.js +257 -0
- package/src/jest.mock.js +277 -0
- package/src/jest.snapshot.js +182 -0
- package/src/jest.timers.js +98 -0
- package/src/node.js +10 -0
- package/src/replay.js +103 -0
- package/src/tape.cjs +15 -0
- package/src/tape.js +160 -0
- package/src/version.js +18 -0
package/bin/jest.js
ADDED
package/bin/reporter.js
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import assert from 'node:assert/strict'
|
|
2
|
+
import { inspect } from 'node:util'
|
|
3
|
+
import { relative, resolve } from 'node:path'
|
|
4
|
+
import { spec as SpecReporter } from 'node:test/reporters'
|
|
5
|
+
|
|
6
|
+
// Environment knobs read by the reporter.
const { FORCE_COLOR, CI, GITHUB_WORKSPACE, LERNA_PACKAGE_NAME } = process.env
// FORCE_COLOR=0 is already handled by hasColors(), so only '1' needs a manual override
const haveColors = process.stdout.hasColors?.() || FORCE_COLOR === '1'
// ANSI start/end code pairs, keyed by util.inspect color name
const colors = new Map(Object.entries(inspect.colors))
// 'gray' on CI, terminal 'dim' locally
const dim = CI ? 'gray' : 'dim'

// Percent-encode a single character (GitHub Actions workflow-command escaping)
const uriReplacer = (x) => {
  const hex = x.codePointAt(0).toString(16).padStart(2, '0')
  return `%${hex.toUpperCase()}`
}

// `key=value` pair with the value escaped for a ::error:: property list
const escapeGitHubValue = (k, v) => {
  const value = String(v ?? '').replace(/[%\r\n:,]/gu, uriReplacer)
  return `${k}=${value}`
}

// Escape a message body for GitHub Actions; %, CR and LF are the special characters there
const escapeGitHub = (s) => String(s || 'Unknown error').replace(/[%\r\n]/gu, uriReplacer)

// Serialize [key, value] entries into a ::error:: property string
const serializeGitHub = (entries) => entries.map(([k, v]) => escapeGitHubValue(k, v)).join(',')

// Wrap text in the ANSI escape pair for the given util.inspect color name.
// Passthrough when colors are disabled or the text is empty; throws on unknown names.
export const color = (text, color) => {
  if (!haveColors || text === '') return text
  if (!colors.has(color)) throw new Error(`Unknown color: ${color}`)
  const [start, end] = colors.get(color)
  return `\x1B[${start}m${text}\x1B[${end}m`
}
|
|
22
|
+
|
|
23
|
+
// Used for pure engine output formatting: colorize the line-leading status
// markers emitted by the pure engine. No-op when colors are disabled.
export const format = (chunk) => {
  if (!haveColors) return chunk
  const rules = [
    [/^✔ PASS /gmu, color('✔ PASS ', 'green')],
    [/^⏭ SKIP /gmu, color('⏭ SKIP ', dim)],
    [/^✖ FAIL /gmu, color('✖ FAIL ', 'red')],
    [/^⚠ WARN /gmu, color('⚠ WARN ', 'blue')],
    [/^‼ FATAL /gmu, `${color('‼', 'red')} ${color(' FATAL ', 'bgRed')} `],
  ]
  let out = chunk
  for (const [pattern, styled] of rules) out = out.replaceAll(pattern, styled)
  return out
}
|
|
33
|
+
|
|
34
|
+
// ` (123ms)` suffix, dimmed; empty string for a falsy duration
const formatTime = (ms) => (ms ? color(` (${ms}ms)`, dim) : '')
// Duration plus an optional ` # TODO` marker for a test event
const formatSuffix = (d) => `${formatTime(d.details.duration_ms)}${d.todo ? ' # TODO' : ''}`

// Group per-file output with ::group:: on CI (lerna+nx groups already, --watch never groups)
const groupCI = CI && !process.execArgv.includes('--watch') && !LERNA_PACKAGE_NAME
export const timeLabel = color('Total time', dim)
// File header: `# file` locally; nothing when grouping (the ::group:: line replaces it)
export const head = groupCI ? () => {} : (file) => console.log(color(`# ${file}`, 'bold'))
// Open the CI group for a finished file, with pass/fail icon and optional duration
export const middle = (file, ok, ms) => {
  if (!groupCI) return
  const icon = ok ? '✅' : '❌'
  console.log(`::group::${icon} ${color(file, 'bold')}${formatTime(ms)}`)
}

// Close the CI group (no-op outside grouping mode)
export const tail = groupCI ? () => console.log('::endgroup::') : () => {}
|
|
46
|
+
// Print the final run summary: an all-passed line, or failed/total counts
// followed by the list of failing suite files.
export const summary = (files, failures) => {
  if (failures.length === 0) {
    console.log(color(`All ${files.length} test suites passed`, 'green'))
    return
  }

  const total = files.length
  const failed = failures.length
  const passed = total - failed
  const failLine = color(`${failed} / ${total}`, 'red')
  const passLine = color(`${passed} / ${total}`, 'green')
  const suffix = passed > 0 ? color(` (passed: ${passLine})`, dim) : ''
  console.log(`${color('Test suites failed:', 'bold')} ${failLine}${suffix}`)
  console.log(color('Failed test suites:', 'red'))
  // Printed one by one: joining with \n can get truncated, too big
  for (const file of failures) console.log(` ${file}`)
}
|
|
59
|
+
|
|
60
|
+
// Strip the CI workspace prefix from stack-trace frames so paths read repo-relative
const cleanLine = (line) => {
  if (!line.trimStart().startsWith('at ')) return line
  return line.replace(`(file://${GITHUB_WORKSPACE}/`, '(')
}

// Apply cleanLine to every line, but only on CI where GITHUB_WORKSPACE is known
const cleanWorkspace = (e) => {
  if (!CI || !GITHUB_WORKSPACE) return e
  return e.split('\n').map(cleanLine).join('\n')
}

// Stack frames inside node's own async_hooks / test-runner machinery (optionally gray-wrapped)
const SKIPPED_TRACE_LINES =
  /\n^(\x1B\[90m)? +at [ a-zA-Z.]+ \(node:(async_hooks|internal\/test_runner\/test):\d+:\d+\)(\x1B\[39m)?$/gmu // eslint-disable-line no-control-regex

// 'subtestsFailed' wrappers carry no details of their own — skipped from printing details
const notPrintedError = (e) => e?.code === 'ERR_TEST_FAILURE' && e?.failureType === 'subtestsFailed'
|
|
66
|
+
// Pull a printable error body and a best-effort source location out of a
// test:fail event. `file` is the repo-relative test file; the location falls
// back to the event's own line/column when no matching stack frame is found.
const extractError = ({ details: { error }, ...data }, file) => {
  if (!error) return ''
  // matcherResult on the cause is noisy matcher internals — drop it before printing
  if (error.cause) delete error.cause.matcherResult // eslint-disable-line @exodus/mutable/no-param-reassign-prop-only
  const selected = error.cause || error
  const body = inspect(selected, { colors: haveColors }).replaceAll(SKIPPED_TRACE_LINES, '')

  // Prefer the first stack frame that points into `file` over the event coordinates
  let loc = { file, line: data.line, col: data.column }
  const validLine = (l) => l.startsWith('at ') && l.replace(/:\d+:\d+\)$/, '').endsWith(`/${file}`)
  const frame = (selected.stack || '').split('\n').find((l) => validLine(l.trimStart()))
  const match = frame?.match(/:(\d+):(\d+)\)$/)
  if (match) loc = { file, line: Number(match[1]), col: Number(match[2]) }

  return { body: cleanWorkspace(body), loc }
}
|
|
80
|
+
|
|
81
|
+
try {
  // Welp, in some cases there is no other way to tell the entry point
  // E.g. when the test file is just an import of another one, the reported 'file' is the imported one
  // We want to know the original entry points instead
  const runner = await import('node:internal/test_runner/runner') // eslint-disable-line @exodus/import/no-unresolved
  const { FileTest } = runner.default || runner
  const original = FileTest.prototype.addToReport
  FileTest.prototype.addToReport = function (item, ...rest) {
    // Tag test:start events with the entry-point file before they reach the reporter
    if (item?.type === 'test:start' && !item.data.entry) {
      item.data.entry = this.loc?.file || (this.name && resolve(this.name)) // eslint-disable-line @exodus/mutable/no-param-reassign-prop-only
    }

    return original.call(this, item, ...rest)
  }
} catch {} // internals unavailable (no --expose-internals) — fall back to per-event file data
|
|
96
|
+
|
|
97
|
+
// Reporter entry point: consumes the node:test event stream and prints one
// compact PASS/SKIP/FAIL line per test, grouped per file (::group:: on CI).
export default async function nodeTestReporterExodus(source) {
  // test:coverage events are delegated to the built-in spec reporter
  const spec = new SpecReporter()
  spec.on('data', (data) => {
    console.log(data.toString('utf8'))
  })

  const log = []
  // When grouping for CI, buffer lines until the file's group is opened in dump()
  const print = (msg) => (groupCI ? log.push(msg) : console.log(msg))
  const dump = () => {
    middle(file, !failedFiles.has(file))
    for (const line of log) console.log(line)
    log.length = 0
    tail()
  }

  const files = new Set()
  const failedFiles = new Set()
  const cwd = process.cwd()
  const path = [] // stack of currently open test names, joined with ' > ' for display
  let file // repo-relative file currently being reported
  const diagnostic = []
  const delayed = [] // stdout/stderr received before the owning test:start
  const isTopLevelTest = ({ nesting, line, column, name, file }) =>
    nesting === 0 && line === 1 && column === 1 && file.endsWith(name) && resolve(name) === file // some events have data.file resolved, some not
  const processNewFile = (data) => {
    const newFile = relative(cwd, data.entry || data.file) // some events have data.file resolved, some not
    if (newFile === file) return
    if (file !== undefined) dump()
    file = newFile
    assert(files.has(file), 'Could not determine file')
    head(file)
  }

  for await (const { type, data } of source) {
    // Ignored: test:complete (no support on older Node.js), test:plan, test:dequeue, test:enqueue
    switch (type) {
      case 'test:dequeue':
        if (data.nesting === 0 && !Object.hasOwn(data, 'file')) files.add(relative(cwd, data.name)) // old-style
        if (isTopLevelTest(data)) files.add(relative(cwd, data.file))
        break
      case 'test:start':
        processNewFile(data)
        path.push(data.name)
        while (delayed.length > 0) print(delayed.shift())
        break
      case 'test:pass': {
        const label = data.skip ? color('⏭ SKIP ', dim) : color('✔ PASS ', 'green')
        print(`${label}${path.join(' > ')}${formatSuffix(data)}`)
        assert(path.pop() === data.name)
        break
      }

      case 'test:fail': {
        print(`${color('✖ FAIL ', 'red')}${path.join(' > ')}${formatSuffix(data)}`)
        assert(path.pop() === data.name)
        if (!data.todo) failedFiles.add(file)
        if (!notPrintedError(data.details.error)) {
          const { body, loc } = extractError(data, relative(cwd, data.file)) // might be different from current file if in subimport
          if (!data.todo && CI && loc.line != null && loc.col != null) {
            // Emit a GitHub Actions ::error:: annotation pinned to file/line/col
            print(`::error ${serializeGitHub(Object.entries(loc))}::${escapeGitHub(body)}`)
          } else if (body) {
            print(`${body.replace(/^/gmu, ' ')}\n`)
          }
        }

        break
      }

      case 'test:watch:drained':
        assert(!groupCI, 'Can not mix --watch with CI grouping')
        console.log(color(`ℹ waiting for changes as we are in --watch mode`, 'blue'))
        break
      case 'test:diagnostic':
        if (/^suites \d+$/.test(data.message)) break // we count suites = files
        diagnostic.push(color(`ℹ ${data.message}`, 'blue'))
        break
      case 'test:stderr':
      case 'test:stdout': {
        // Output arriving before its test:start is delayed and flushed there
        const handle = path.length > 0 ? print : (arg) => delayed.push(arg)
        handle(data.message.replace(/\n$/, '')) // these are printed at test:start
        break
      }

      case 'test:coverage':
        spec.write({ type, data }) // let spec reporter handle that
        break
    }
  }

  // Flush the last file's buffered output, then diagnostics and the summary
  dump()
  for (const line of diagnostic) console.log(line)
  summary([...files], [...failedFiles])
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
const { Worker, MessageChannel, isMainThread, parentPort } = require('node:worker_threads')
const { once } = require('node:events')
const { availableParallelism } = require('node:os')

if (isMainThread) {
  // Main thread: lazily spin up at most `maxWorkers` babel workers and
  // dispatch each transform job to the least busy one.
  const maxWorkers = availableParallelism() >= 4 ? 2 : 1
  const workers = []

  const getWorker = () => {
    const idle = workers.find((entry) => entry.busy === 0)
    if (idle) return idle

    if (workers.length < maxWorkers) {
      const worker = new Worker(__filename)
      worker.unref() // the pool must not keep the process alive
      // unhandled top-level errors will crash automatically, which is desired behavior, no need to listen to error
      workers.unshift({ worker, busy: 0 })
    } else if (workers.length > 1) {
      // Pool is full: move the least busy worker to the front
      workers.sort((a, b) => a.busy - b.busy)
    }

    return workers[0]
  }

  // Run a babel transform in a worker; resolves with the result or rethrows the worker's error
  const transformAsync = async (code, options) => {
    const entry = getWorker()
    entry.busy++
    const { port1, port2 } = new MessageChannel()
    entry.worker.postMessage({ port: port1, code, options }, [port1])
    const [reply] = await once(port2, 'message')
    entry.busy--
    if (reply.error) throw reply.error
    return reply.result
  }

  module.exports = { transformAsync }
} else {
  // Worker thread: perform the actual babel transform per incoming message.
  const babel = require('@babel/core')
  const tryLoadPlugin = (name) => {
    // Try unwrapping plugin names, as otherwise Babel tries to require them from the wrong dir,
    // which breaks strict directory structure under pnpm in some setups
    try {
      if (typeof name === 'string' && name.startsWith('@babel/plugin-')) return require(name)
    } catch {}

    return name
  }

  parentPort.on('message', ({ port, code: input, options }) => {
    try {
      // eslint-disable-next-line @exodus/mutable/no-param-reassign-prop-only
      if (options.plugins) options.plugins = options.plugins.map((name) => tryLoadPlugin(name))
      // NOTE(review): babel's result object exposes `sourceType` (camelCase); the lowercase
      // `sourcetype` picked here is likely always undefined — confirm whether `sourceType`
      // was intended before changing, as consumers may rely on the current shape
      const { code, sourcetype, map } = babel.transformSync(input, options) // async here is useless and slower
      // additional properties are deleted as we don't want to transfer e.g. Plugin instances
      port.postMessage({ result: { code, sourcetype, map } })
    } catch (error) {
      port.postMessage({ error })
    }

    port.close()
  })
}
|
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
import assert from 'node:assert/strict'
|
|
2
|
+
import { readFile } from 'node:fs/promises'
|
|
3
|
+
import { existsSync } from 'node:fs'
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from 'node:url'
|
|
5
|
+
import { basename, dirname, extname, resolve, join } from 'node:path'
|
|
6
|
+
import { createRequire } from 'node:module'
|
|
7
|
+
import { randomUUID as uuid, randomBytes } from 'node:crypto'
|
|
8
|
+
import * as esbuild from 'esbuild'
|
|
9
|
+
import glob from 'fast-glob'
|
|
10
|
+
|
|
11
|
+
// CommonJS resolver anchored at this file — used to locate the shim modules below
const require = createRequire(import.meta.url)
const resolveRequire = (query) => require.resolve(query)
// import.meta.resolve is missing on older Node.js, hence the guard before wrapping it
const resolveImport = import.meta.resolve && ((query) => fileURLToPath(import.meta.resolve(query)))
|
|
14
|
+
|
|
15
|
+
// Read every existing snapshot/recording file for the given test files.
// Each resolver maps (dir, basename) to path segments for a candidate file;
// missing candidates (ENOENT) are silently skipped, other errors propagate.
// Returns [path, contents] pairs.
const readSnapshots = async (files, resolvers) => {
  const found = []
  for (const file of files) {
    const dir = dirname(file)
    const base = basename(file)
    for (const resolver of resolvers) {
      const candidate = join(...resolver(dir, base))
      try {
        const text = await readFile(candidate, 'utf8')
        found.push([candidate, text])
      } catch (e) {
        if (e.code !== 'ENOENT') throw e // only "does not exist" is expected
      }
    }
  }

  return found
}
|
|
30
|
+
|
|
31
|
+
// Source transforms applied in order to every loaded file. The base transform
// inlines import.meta.url / __dirname / __filename as string literals, since
// those have no meaning inside a bundle. init() may prepend/append more steps.
const loadPipeline = [
  (source, filepath) => {
    const replacements = [
      [/\bimport\.meta\.url\b/g, JSON.stringify(pathToFileURL(filepath))],
      [/\b(__dirname|import\.meta\.dirname)\b/g, JSON.stringify(dirname(filepath))],
      [/\b(__filename|import\.meta\.filename)\b/g, JSON.stringify(filepath)],
    ]
    let result = source
    for (const [pattern, value] of replacements) result = result.replace(pattern, value)
    return result
  },
]
|
|
39
|
+
|
|
40
|
+
// Mutable bundler configuration shared by init() and build(); populated by init()
const options = {}
|
|
41
|
+
|
|
42
|
+
// Record bundler options and extend the load pipeline accordingly.
// Must run before build(); the flow transform is prepended so it sees raw source.
export const init = async ({ platform, jest, flow, target, jestConfig, outdir }) => {
  Object.assign(options, { platform, jest, flow, target, jestConfig, outdir })

  if (flow) {
    const { default: flowRemoveTypes } = await import('flow-remove-types')
    loadPipeline.unshift((source) => flowRemoveTypes(source, { pretty: true }).toString())
  }

  if (platform === 'hermes') {
    // Downlevel syntax via babel (in a worker pool) for the Hermes target
    const babel = await import('./babel-worker.cjs')
    loadPipeline.push(async (source) => {
      const result = await babel.transformAsync(source, {
        compact: false,
        babelrc: false,
        configFile: false,
        plugins: [
          '@babel/plugin-syntax-typescript',
          '@babel/plugin-transform-block-scoping',
          '@babel/plugin-transform-class-properties',
          '@babel/plugin-transform-classes',
          '@babel/plugin-transform-private-methods',
        ],
      })
      return result.code
    })
  }
}
|
|
69
|
+
|
|
70
|
+
// esbuild `supported` overrides for the Hermes target: marking these features
// unsupported forces esbuild to downlevel them (or fail loudly if it cannot).
const hermesSupported = {
  arrow: false,
  class: false, // we get a safeguard check this way that it's not used
  'async-generator': false,
  'const-and-let': false, // have to explicitly set for esbuild to not emit that in helpers, also to get a safeguard check
  'for-await': false,
}
|
|
77
|
+
|
|
78
|
+
// Collect the published-files globs for the package containing `dir`, walking
// up until a package.json with a `files` field is found.
// Returns an empty list on errors
const getPackageFiles = async (dir) => {
  let patterns
  try {
    const manifest = JSON.parse(await readFile(resolve(dir, 'package.json'), 'utf8'))
    patterns = manifest.files
  } catch {}

  if (!patterns) {
    const parent = dirname(dir)
    // Filesystem root reached without finding a `files` field
    if (parent === dir) return []
    return getPackageFiles(parent)
  }

  // Hack for now, TODO: fix this
  const expanded = patterns.flatMap((x) => (x.includes('.') ? [x] : [x, `${x}/**/*`]))
  return glob(expanded, { ignore: ['**/node_modules'], cwd: dir, absolute: true })
}
|
|
95
|
+
|
|
96
|
+
// Promise cache: each file is read and transformed at most once per process
const loadCache = new Map()
// Read `filepath` and run it through every loadPipeline transform, memoized
const loadSourceFile = async (filepath) => {
  const cached = loadCache.get(filepath)
  if (cached) return cached

  const pending = (async () => {
    let contents = await readFile(filepath, 'utf8')
    for (const transform of loadPipeline) contents = await transform(contents, filepath)
    return contents
  })()
  loadCache.set(filepath, pending)
  return pending
}
|
|
110
|
+
|
|
111
|
+
export const build = async (...files) => {
|
|
112
|
+
const input = []
|
|
113
|
+
const importSource = async (file) => input.push(await loadSourceFile(resolveRequire(file)))
|
|
114
|
+
const importFile = (...args) => input.push(`await import(${JSON.stringify(resolve(...args))});`)
|
|
115
|
+
const stringify = (x) => ([undefined, null].includes(x) ? `${x}` : JSON.stringify(x))
|
|
116
|
+
|
|
117
|
+
if (!['node'].includes(options.platform)) {
|
|
118
|
+
if (['jsc', 'hermes', 'd8'].includes(options.platform)) {
|
|
119
|
+
const entropy = randomBytes(5 * 1024).toString('base64')
|
|
120
|
+
input.push(`globalThis.EXODUS_TEST_CRYPTO_ENTROPY = ${stringify(entropy)};`)
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
await importSource('./modules/globals.cjs')
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
if (options.jest) {
|
|
127
|
+
const { jestConfig } = options
|
|
128
|
+
const preload = [...(jestConfig.setupFiles || []), ...(jestConfig.setupFilesAfterEnv || [])]
|
|
129
|
+
if (jestConfig.testEnvironment && jestConfig.testEnvironment !== 'node') {
|
|
130
|
+
const { specialEnvironments } = await import('../src/jest.environment.js')
|
|
131
|
+
assert(Object.hasOwn(specialEnvironments, jestConfig.testEnvironment))
|
|
132
|
+
preload.push(...(specialEnvironments[jestConfig.testEnvironment].dependencies || []))
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
if (preload.length === 0) {
|
|
136
|
+
input.push(`globalThis.EXODUS_TEST_PRELOADED = []`)
|
|
137
|
+
} else {
|
|
138
|
+
assert(jestConfig.rootDir)
|
|
139
|
+
const local = createRequire(resolve(jestConfig.rootDir, 'package.json'))
|
|
140
|
+
const w = (f) => `[${stringify(f)}, () => require(${stringify(local.resolve(f))})]`
|
|
141
|
+
input.push(`globalThis.EXODUS_TEST_PRELOADED = [${preload.map((f) => w(f)).join(', ')}]`)
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
await importSource('../bin/jest.js')
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
for (const file of files) importFile(file)
|
|
148
|
+
|
|
149
|
+
const filename = files.length === 1 ? `${files[0]}-${uuid().slice(0, 8)}` : `bundle-${uuid()}`
|
|
150
|
+
const outfile = `${join(options.outdir, filename)}.js`
|
|
151
|
+
const EXODUS_TEST_SNAPSHOTS = await readSnapshots(files, [
|
|
152
|
+
(dir, name) => [dir, `${name}.snapshot`], // node:test
|
|
153
|
+
(dir, name) => [dir, '__snapshots__', `${name}.snap`], // jest
|
|
154
|
+
])
|
|
155
|
+
const EXODUS_TEST_RECORDINGS = await readSnapshots(files, [
|
|
156
|
+
(dir, name) => [dir, '__recordings__', 'fetch', `${name}.json`],
|
|
157
|
+
(dir, name) => [dir, '__recordings__', 'websocket', `${name}.json`],
|
|
158
|
+
])
|
|
159
|
+
const buildWrap = async (opts) => esbuild.build(opts).catch((err) => err)
|
|
160
|
+
let main = input.join(';\n')
|
|
161
|
+
if (['jsc', 'hermes', 'd8'].includes(options.platform)) {
|
|
162
|
+
const exit = `EXODUS_TEST_PROCESS.exitCode = 1; EXODUS_TEST_PROCESS._maybeProcessExitCode();`
|
|
163
|
+
main = `try {\n${main}\n} catch (err) { print(err); ${exit} }`
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
const fsfiles = await getPackageFiles(filename ? dirname(resolve(filename)) : process.cwd())
|
|
167
|
+
|
|
168
|
+
const hasBuffer = ['node', 'bun'].includes(options.platform)
|
|
169
|
+
const api = (f) => resolveRequire(`./modules/${f}`)
|
|
170
|
+
const res = await buildWrap({
|
|
171
|
+
logLevel: 'silent',
|
|
172
|
+
stdin: {
|
|
173
|
+
contents: `(async function () {\n${main}\n})()`,
|
|
174
|
+
resolveDir: dirname(fileURLToPath(import.meta.url)),
|
|
175
|
+
},
|
|
176
|
+
bundle: true,
|
|
177
|
+
outdir: options.outdir,
|
|
178
|
+
entryNames: filename,
|
|
179
|
+
platform: 'neutral',
|
|
180
|
+
mainFields: ['browser', 'module', 'main'],
|
|
181
|
+
define: {
|
|
182
|
+
'process.env.FORCE_COLOR': stringify('0'),
|
|
183
|
+
'process.env.NO_COLOR': stringify('1'),
|
|
184
|
+
'process.env.NODE_ENV': stringify(process.env.NODE_ENV),
|
|
185
|
+
'process.env.EXODUS_TEST_CONTEXT': stringify('pure'),
|
|
186
|
+
'process.env.EXODUS_TEST_ENVIRONMENT': stringify('bundle'), // always 'bundle'
|
|
187
|
+
'process.env.EXODUS_TEST_PLATFORM': stringify(process.env.EXODUS_TEST_PLATFORM), // e.g. 'hermes', 'node'
|
|
188
|
+
'process.env.EXODUS_TEST_ENGINE': stringify(process.env.EXODUS_TEST_ENGINE), // e.g. 'hermes:bundle', 'node:bundle'
|
|
189
|
+
'process.env.EXODUS_TEST_JEST_CONFIG': stringify(JSON.stringify(options.jestConfig)),
|
|
190
|
+
'process.env.EXODUS_TEST_EXECARGV': stringify(process.env.EXODUS_TEST_EXECARGV),
|
|
191
|
+
'process.env.EXODUS_TEST_ONLY': stringify(process.env.EXODUS_TEST_ONLY),
|
|
192
|
+
'process.env.NODE_DEBUG': stringify(),
|
|
193
|
+
'process.env.DEBUG': stringify(),
|
|
194
|
+
'process.env.READABLE_STREAM': stringify(),
|
|
195
|
+
'process.env.CI': stringify(process.env.CI),
|
|
196
|
+
'process.env.CI_ENABLE_VERBOSE_LOGS': stringify(process.env.CI_ENABLE_VERBOSE_LOGS),
|
|
197
|
+
'process.browser': stringify(true),
|
|
198
|
+
'process.emitWarning': 'undefined',
|
|
199
|
+
'process.stderr': 'undefined',
|
|
200
|
+
'process.stdout': 'undefined',
|
|
201
|
+
'process.type': 'undefined',
|
|
202
|
+
'process.version': stringify('v22.5.1'), // shouldn't depend on currently used Node.js version
|
|
203
|
+
'process.versions.node': stringify('22.5.1'), // see line above
|
|
204
|
+
EXODUS_TEST_FILES: stringify(files.map((f) => [dirname(f), basename(f)])),
|
|
205
|
+
EXODUS_TEST_SNAPSHOTS: stringify(EXODUS_TEST_SNAPSHOTS),
|
|
206
|
+
EXODUS_TEST_RECORDINGS: stringify(EXODUS_TEST_RECORDINGS),
|
|
207
|
+
EXODUS_TEST_FSFILES: stringify(fsfiles), // TODO: can we safely use relative paths?
|
|
208
|
+
},
|
|
209
|
+
alias: {
|
|
210
|
+
// Jest, tape and node:test
|
|
211
|
+
'@jest/globals': resolveImport('../src/jest.js'),
|
|
212
|
+
tape: resolveImport('../src/tape.cjs'),
|
|
213
|
+
'tape-promise/tape': resolveImport('../src/tape.cjs'),
|
|
214
|
+
'node:test': resolveImport('../src/node.js'),
|
|
215
|
+
// Inner
|
|
216
|
+
'exodus-test:util-format': api('util-format.cjs'),
|
|
217
|
+
// Node browserify
|
|
218
|
+
'node:assert': dirname(dirname(resolveRequire('assert/'))),
|
|
219
|
+
'node:assert/strict': api('assert-strict.cjs'),
|
|
220
|
+
'node:fs': api('fs.cjs'),
|
|
221
|
+
'node:fs/promises': api('fs-promises.cjs'),
|
|
222
|
+
fs: api('fs.cjs'),
|
|
223
|
+
'fs/promises': api('fs-promises.cjs'),
|
|
224
|
+
assert: dirname(dirname(resolveRequire('assert/'))),
|
|
225
|
+
buffer: hasBuffer ? api('node-buffer.cjs') : dirname(resolveRequire('buffer/')),
|
|
226
|
+
child_process: api('child_process.cjs'),
|
|
227
|
+
constants: resolveRequire('constants-browserify'),
|
|
228
|
+
crypto: api('crypto.cjs'),
|
|
229
|
+
events: dirname(resolveRequire('events/')),
|
|
230
|
+
http: api('http.cjs'),
|
|
231
|
+
https: api('https.cjs'),
|
|
232
|
+
os: resolveRequire('os-browserify'),
|
|
233
|
+
path: resolveRequire('path-browserify'),
|
|
234
|
+
querystring: resolveRequire('querystring-es3'),
|
|
235
|
+
stream: resolveRequire('stream-browserify'),
|
|
236
|
+
timers: resolveRequire('timers-browserify'),
|
|
237
|
+
url: dirname(resolveRequire('url/')),
|
|
238
|
+
util: dirname(resolveRequire('util/')),
|
|
239
|
+
zlib: resolveRequire('browserify-zlib'),
|
|
240
|
+
// expect-related deps
|
|
241
|
+
'ansi-styles': api('ansi-styles.cjs'),
|
|
242
|
+
'jest-util': api('jest-util.js'),
|
|
243
|
+
'jest-message-util': api('jest-message-util.js'),
|
|
244
|
+
// unwanted deps
|
|
245
|
+
bindings: api('empty/function-throw.cjs'),
|
|
246
|
+
'node-gyp-build': api('empty/function-throw.cjs'),
|
|
247
|
+
ws: api('ws.cjs'),
|
|
248
|
+
},
|
|
249
|
+
sourcemap: ['hermes', 'jsc', 'd8'].includes(options.platform) ? 'inline' : 'linked', // FIXME?
|
|
250
|
+
sourcesContent: false,
|
|
251
|
+
keepNames: true,
|
|
252
|
+
format: 'iife',
|
|
253
|
+
target: options.target || `node${process.versions.node}`,
|
|
254
|
+
supported: {
|
|
255
|
+
bigint: true,
|
|
256
|
+
...(options.platform === 'hermes' ? hermesSupported : {}),
|
|
257
|
+
},
|
|
258
|
+
plugins: [
|
|
259
|
+
{
|
|
260
|
+
name: 'exodus-test.bundle',
|
|
261
|
+
setup({ onLoad }) {
|
|
262
|
+
onLoad({ filter: /\.[cm]?[jt]sx?$/, namespace: 'file' }, async (args) => {
|
|
263
|
+
let filepath = args.path
|
|
264
|
+
// Resolve .native versions
|
|
265
|
+
// TODO: move flag to engine options
|
|
266
|
+
// TODO: maybe follow package.json for this
|
|
267
|
+
if (['jsc', 'hermes'].includes(options.platform)) {
|
|
268
|
+
const maybeNative = filepath.replace(/(\.[cm]?[jt]sx?)$/u, '.native$1')
|
|
269
|
+
if (existsSync(maybeNative)) filepath = maybeNative
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
const loader = extname(filepath).replace(/^\.[cm]?/, '') // TODO: a flag to force jsx/tsx perhaps
|
|
273
|
+
assert(['js', 'ts', 'jsx', 'tx'].includes(loader))
|
|
274
|
+
|
|
275
|
+
return { contents: await loadSourceFile(filepath), loader }
|
|
276
|
+
})
|
|
277
|
+
},
|
|
278
|
+
},
|
|
279
|
+
],
|
|
280
|
+
})
|
|
281
|
+
assert.equal(res instanceof Error, res.errors.length > 0)
|
|
282
|
+
|
|
283
|
+
// if (res.errors.length === 0) require('fs').copyFileSync(outfile, 'tempout.cjs') // DEBUG
|
|
284
|
+
|
|
285
|
+
// We treat warnings as errors, so just merge all them
|
|
286
|
+
const errors = []
|
|
287
|
+
const formatOpts = { color: process.stdout.hasColors?.(), terminalWidth: process.stdout.columns }
|
|
288
|
+
const formatMessages = (list, kind) => esbuild.formatMessages(list, { kind, ...formatOpts })
|
|
289
|
+
if (res.warnings.length > 0) errors.push(...(await formatMessages(res.warnings, 'warning')))
|
|
290
|
+
if (res.errors.length > 0) errors.push(...(await formatMessages(res.errors, 'error')))
|
|
291
|
+
return { file: outfile, errors }
|
|
292
|
+
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
// No-op stand-in for the `ansi-styles` package: every style opens and closes
// with an empty string, so styled output degrades to plain text in bundles.
const modifiers = [
  'reset',
  'bold',
  'dim',
  'italic',
  'underline',
  'overline',
  'inverse',
  'hidden',
  'strikethrough',
]
const foreground = [
  'black',
  'red',
  'green',
  'yellow',
  'blue',
  'magenta',
  'cyan',
  'white',
  'blackBright',
  'redBright',
  'greenBright',
  'yellowBright',
  'blueBright',
  'magentaBright',
  'cyanBright',
  'whiteBright',
  'gray',
  'grey',
]
const background = [
  'bgBlack',
  'bgRed',
  'bgGreen',
  'bgYellow',
  'bgBlue',
  'bgMagenta',
  'bgCyan',
  'bgWhite',
  'bgBlackBright',
  'bgRedBright',
  'bgGreenBright',
  'bgYellowBright',
  'bgBlueBright',
  'bgMagentaBright',
  'bgCyanBright',
  'bgWhiteBright',
  'bgGray',
  'bgGrey',
]

for (const key of [...modifiers, ...foreground, ...background]) exports[key] = { open: '', close: '' }
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Bundled stand-in for `node:assert/strict`: re-exports the strict flavor of node's assert
module.exports = require('node:assert').strict
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
// child_process stub for bundles: every API member throws on use.
const names = ['ChildProcess', 'exec', 'execFile', 'execFileSync', 'execSync', 'fork', 'spawn', 'spawnSync']

const stub = (name) => {
  // Not an arrow as ChildProcess is a class and can be called with new
  return function () {
    throw new Error(`child_process.${name} unsupported in bundled mode`)
  }
}

const api = {}
for (const name of names) api[name] = stub(name)
module.exports = api
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
// crypto shim for bundles: crypto-browserify plus the platform WebCrypto pieces.
const browserify = require('crypto-browserify')
const webcrypto = globalThis.crypto
// Wrapped in arrows so the WebCrypto methods keep their required receiver
const randomUUID = () => webcrypto.randomUUID()
const getRandomValues = (array) => webcrypto.getRandomValues(array)
module.exports = { ...browserify, webcrypto, subtle: webcrypto?.subtle, randomUUID, getRandomValues }
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Placeholder for modules that must never load in bundled mode — importing one is a hard error
throw new Error('module unsupported in bundled mode')
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Bundled stand-in for `node:fs/promises`: re-exports the promises API of the fs shim
// (the bundler aliases the `node:fs` specifier to the local fs.cjs module)
module.exports = require('node:fs').promises
|