@exodus/test-bundler 1.0.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +3 -0
- package/LICENSE +21 -0
- package/babel-worker.cjs +62 -0
- package/bundle.js +555 -0
- package/modules/ansi-styles.cjs +49 -0
- package/modules/assert-strict.cjs +1 -0
- package/modules/child_process.cjs +10 -0
- package/modules/cluster.cjs +27 -0
- package/modules/crypto.cjs +5 -0
- package/modules/empty/function-throw.cjs +4 -0
- package/modules/empty/module-throw.cjs +1 -0
- package/modules/fs-promises.cjs +1 -0
- package/modules/fs.cjs +123 -0
- package/modules/globals.cjs +341 -0
- package/modules/globals.node.cjs +8 -0
- package/modules/http.cjs +119 -0
- package/modules/https.cjs +11 -0
- package/modules/jest-message-util.js +5 -0
- package/modules/jest-util.js +22 -0
- package/modules/module.cjs +16 -0
- package/modules/node-buffer.cjs +3 -0
- package/modules/text-encoding-utf.cjs +90 -0
- package/modules/tty.cjs +10 -0
- package/modules/url.cjs +32 -0
- package/modules/util-format.cjs +48 -0
- package/modules/util.cjs +4 -0
- package/modules/ws.cjs +20 -0
- package/package.json +81 -0
package/CHANGELOG.md
ADDED
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Exodus Movement, Inc
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/babel-worker.cjs
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
const { Worker, MessageChannel, isMainThread, parentPort } = require('node:worker_threads')
const { once } = require('node:events')
const { availableParallelism } = require('node:os')

if (isMainThread) {
  // Main thread: maintain a tiny pool (1-2 workers) that offloads Babel transforms.
  const maxWorkers = availableParallelism() >= 4 ? 2 : 1
  const workers = []

  // Picks an idle worker, spawns a new one while under the limit, or otherwise
  // returns the least busy existing worker.
  const getWorker = () => {
    const idle = workers.find((info) => info.busy === 0)
    if (idle) return idle

    if (workers.length < maxWorkers) {
      const worker = new Worker(__filename)
      worker.unref() // pooled workers must not keep the process alive
      // unhandled top-level errors will crash automatically, which is desired behavior, no need to listen to error
      workers.unshift({ worker, busy: 0 })
    } else if (workers.length > 1) {
      workers.sort((a, b) => a.busy - b.busy) // least busy first
    }

    return workers[0]
  }

  // Transforms `code` with Babel inside a worker thread.
  // Resolves with the worker's result or rethrows the worker-side error.
  const transformAsync = async (code, options) => {
    const info = getWorker()
    info.busy++
    try {
      const channel = new MessageChannel()
      info.worker.postMessage({ port: channel.port1, code, options }, [channel.port1])
      const [{ result, error }] = await once(channel.port2, 'message')
      if (error) throw error
      return result
    } finally {
      // FIX: previously busy-- was skipped when once() rejected, leaking a pool slot forever
      info.busy--
    }
  }

  module.exports = { transformAsync }
} else {
  const babel = require('@babel/core')
  const tryLoadPlugin = (name) => {
    // Try unwrapping plugin names, as otherwise Babel tries to require them from the wrong dir,
    // which breaks strict directory structure under pnpm in some setups
    try {
      if (typeof name === 'string' && name.startsWith('@babel/plugin-')) return require(name)
    } catch {}

    return name
  }

  parentPort.on('message', ({ port, code: input, options }) => {
    try {
      // eslint-disable-next-line @exodus/mutable/no-param-reassign-prop-only
      if (options.plugins) options.plugins = options.plugins.map((name) => tryLoadPlugin(name))
      // FIX: the result property is `sourceType`; `sourcetype` was always undefined
      const { code, sourceType, map } = babel.transformSync(input, options) // async here is useless and slower
      // additional properties are deleted as we don't want to transfer e.g. Plugin instances
      port.postMessage({ result: { code, sourceType, map } })
    } catch (error) {
      port.postMessage({ error })
    }

    port.close()
  })
}
|
package/bundle.js
ADDED
|
@@ -0,0 +1,555 @@
|
|
|
1
|
+
import assert from 'node:assert/strict'
|
|
2
|
+
import fsPromises, { readFile, writeFile, readdir } from 'node:fs/promises'
|
|
3
|
+
import { existsSync } from 'node:fs'
|
|
4
|
+
import { fileURLToPath, pathToFileURL } from 'node:url'
|
|
5
|
+
import { basename, dirname, extname, resolve, join, relative } from 'node:path'
|
|
6
|
+
import { createRequire } from 'node:module'
|
|
7
|
+
import { randomUUID as uuid, randomBytes } from 'node:crypto'
|
|
8
|
+
import * as esbuild from 'esbuild'
|
|
9
|
+
import { glob as globImplementation } from '../src/glob.cjs' // TODO: inject when separated
|
|
10
|
+
|
|
11
|
+
const require = createRequire(import.meta.url)
|
|
12
|
+
const resolveRequire = (query) => require.resolve(query)
|
|
13
|
+
const resolveImport = import.meta.resolve && ((query) => fileURLToPath(import.meta.resolve(query)))
|
|
14
|
+
const cjsMockRegex = /\.exodus-test-mock\.cjs$/u
|
|
15
|
+
const cjsMockFallback = `throw new Error('Mocking loaded ESM modules in not possible in bundles')`
|
|
16
|
+
|
|
17
|
+
// Collects snapshot/recording files for the given test files.
// Each resolver maps (dir, name) to path segments of a candidate file;
// existing candidates are returned as [path, contents] pairs.
const readSnapshots = async (files, resolvers) => {
  const snapshots = []
  for (const file of files) {
    const dir = dirname(file)
    const name = basename(file)
    for (const resolver of resolvers) {
      const candidate = join(...resolver(dir, name))
      try {
        const contents = await readFile(candidate, 'utf8')
        snapshots.push([candidate, contents])
      } catch (err) {
        // A missing snapshot file is expected; anything else is a real error
        if (err.code !== 'ENOENT') throw err
      }
    }
  }

  return snapshots
}
|
|
32
|
+
|
|
33
|
+
// Serializes a value for inlining into generated code.
// JSON.stringify(undefined) yields undefined (not a string), so nullish
// values are rendered as the literal tokens "undefined" / "null" instead.
const stringify = (x) => {
  if (x === undefined || x === null) return String(x)
  return JSON.stringify(x)
}
|
|
34
|
+
const loadPipeline = [
|
|
35
|
+
function (source, filepath) {
|
|
36
|
+
let res = source
|
|
37
|
+
.replace(/\bimport\.meta\.url\b/g, JSON.stringify(pathToFileURL(filepath)))
|
|
38
|
+
.replace(/\b(__dirname|import\.meta\.dirname)\b/g, JSON.stringify(dirname(filepath)))
|
|
39
|
+
.replace(/\b(__filename|import\.meta\.filename)\b/g, JSON.stringify(filepath))
|
|
40
|
+
|
|
41
|
+
if (options.platform === 'engine262') {
|
|
42
|
+
if (filepath.endsWith('/node_modules/chalk/source/templates.js')) {
|
|
43
|
+
// It has an invalid regex on which engine262 fails
|
|
44
|
+
res = res.replace(
|
|
45
|
+
'const ESCAPE_REGEX = /\\\\(u(?:[a-f\\d]{4}|{[a-f\\d]{1,6}})|x[a-f\\d]{2}|.)|([^\\\\])/gi;',
|
|
46
|
+
'const ESCAPE_REGEX = /\\\\(u(?:[a-f\\d]{4}|\\{[a-f\\d]{1,6}\\})|x[a-f\\d]{2}|.)|([^\\\\])/giu;'
|
|
47
|
+
)
|
|
48
|
+
} else if (filepath.endsWith('/node_modules/qs/lib/parse.js')) {
|
|
49
|
+
res = res.replace('var brackets = /(\\[[^[\\]]*])/;', 'var brackets = /(\\[[^[\\]]*\\])/;')
|
|
50
|
+
res = res.replace('var child = /(\\[[^[\\]]*])/g;', 'var child = /(\\[[^[\\]]*\\])/g;')
|
|
51
|
+
} else if (filepath.endsWith('/node_modules/url/url.js')) {
|
|
52
|
+
// .substr is not part of the main ECMA-262 spec
|
|
53
|
+
// We need this module for pathToFileURL
|
|
54
|
+
res = res.replace('&& protocol.substr(-1) !==', '&& protocol[protocol.length - 1] !==')
|
|
55
|
+
res = res.replace('= rest.substr(proto.length);', '= rest.substring(proto.length);')
|
|
56
|
+
res = res.replace('= rest.substr(2);', '= rest.substring(2);')
|
|
57
|
+
res = res.replace('= rest.substr(0, 2) ===', '= rest.substring(0, 2) ===')
|
|
58
|
+
} else if (filepath.endsWith('/node_modules/buffer/index.js')) {
|
|
59
|
+
res = res.replace('.substr(i * 2, 2)', '.substring(i * 2, i * 2 + 2)')
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Unneded polyfills
|
|
64
|
+
for (const [a, b] of Object.entries({
|
|
65
|
+
'is-nan': 'Number.isNaN', // https://www.npmjs.com/package/is-nan description: ES2015-compliant shim for Number.isNaN
|
|
66
|
+
'is-nan/polyfill': '() => Number.isNaN',
|
|
67
|
+
'object.assign': 'Object.assign',
|
|
68
|
+
'object.assign/polyfill': '() => Object.assign',
|
|
69
|
+
'object-is': 'Object.is',
|
|
70
|
+
'object-is/polyfill': '() => Object.is',
|
|
71
|
+
hasown: 'Object.hasOwn',
|
|
72
|
+
gopd: 'Object.getOwnPropertyDescriptor',
|
|
73
|
+
'has-property-descriptors': '() => true',
|
|
74
|
+
'has-symbols': '() => true',
|
|
75
|
+
'has-symbols/shams': '() => true',
|
|
76
|
+
'has-tostringtag': "() => typeof Symbol.toStringTag === 'symbol'",
|
|
77
|
+
'has-tostringtag/shams': '() => !!Symbol.toStringTag',
|
|
78
|
+
'es-define-property': 'Object.defineProperty',
|
|
79
|
+
'es-errors': 'Error',
|
|
80
|
+
'es-errors/eval': 'EvalError',
|
|
81
|
+
'es-errors/range': 'RangeError',
|
|
82
|
+
'es-errors/ref': 'ReferenceError',
|
|
83
|
+
'es-errors/syntax': 'SyntaxError',
|
|
84
|
+
'es-errors/type': 'TypeError',
|
|
85
|
+
'es-errors/uri': 'URIError',
|
|
86
|
+
})) {
|
|
87
|
+
res = res.replaceAll(`require('${a}')`, `(${b})`).replaceAll(`require("${a}")`, `(${b})`) // Assumes well-formed names/code
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
return res
|
|
91
|
+
},
|
|
92
|
+
]
|
|
93
|
+
|
|
94
|
+
const options = {}
|
|
95
|
+
|
|
96
|
+
// Stores the run configuration and registers platform-specific source
// transforms on the shared loadPipeline. Must be called before build().
export const init = async ({ platform, jest, flow, target, jestConfig, outdir, entropySize }) => {
  Object.assign(options, { platform, jest, flow, target, jestConfig, outdir, entropySize })

  // Flow type annotations must be stripped before any other transform runs
  if (options.flow) {
    const { default: flowRemoveTypes } = await import('flow-remove-types')
    const stripFlow = (source) => flowRemoveTypes(source, { pretty: true }).toString()
    loadPipeline.unshift(stripFlow)
  }

  // Hermes lacks several class-related ES features — downlevel them via the Babel worker pool
  if (options.platform === 'hermes') {
    const babel = await import('./babel-worker.cjs')
    const hermesPlugins = [
      '@babel/plugin-syntax-typescript',
      '@babel/plugin-syntax-import-attributes',
      '@babel/plugin-transform-block-scoping',
      '@babel/plugin-transform-class-properties',
      '@babel/plugin-transform-classes',
      '@babel/plugin-transform-private-methods',
    ]
    loadPipeline.push(async (source) => {
      const babelOptions = { compact: false, babelrc: false, configFile: false, plugins: hermesPlugins }
      const result = await babel.transformAsync(source, babelOptions)
      return result.code
    })
  }
}
|
|
124
|
+
|
|
125
|
+
// ES features Hermes cannot handle natively, so esbuild must compile them away.
// The explicit `false` entries also act as safeguards: esbuild refuses to emit
// them, including in its own injected helpers (notably const/let and class).
const hermesSupported = {
  arrow: false,
  class: false,
  'async-generator': false,
  'const-and-let': false,
  'for-await': false,
}
|
|
132
|
+
|
|
133
|
+
// Matches glob patterns under cwd, preferring the bundled implementation and
// falling back to Node's native fs.promises.glob (async iterator) otherwise.
async function glob(patterns, { exclude, cwd }) {
  if (globImplementation) return globImplementation(patterns, { exclude, cwd })
  const matches = fsPromises.glob(patterns, { exclude, cwd })
  return Array.fromAsync(matches)
}
|
|
137
|
+
|
|
138
|
+
// Resolves the nearest package.json "files" patterns (walking up parents)
// to a list of absolute file paths. Returns an empty list on errors.
const getPackageFiles = async (dir) => {
  let patterns
  try {
    patterns = JSON.parse(await readFile(resolve(dir, 'package.json'), 'utf8')).files
  } catch {}

  // FIX: also guard against a malformed "files" field (e.g. a string) — the
  // .flatMap below would throw, breaking the documented "empty list on errors"
  // contract. Keep walking up in that case, exactly as for a missing field.
  if (!Array.isArray(patterns)) {
    const parent = dirname(dir)
    if (parent !== dir) return getPackageFiles(parent)
    return []
  }

  // Hack for now, TODO: fix this
  const expanded = patterns.flatMap((x) => (x.includes('.') ? [x] : [x, `${x}/**/*`]))
  const files = await glob(expanded, { exclude: ['**/node_modules'], cwd: dir })
  return files.map((file) => resolve(dir, file)) // absolute
}
|
|
156
|
+
|
|
157
|
+
const loadCache = new Map()
// Reads a source file and runs it through loadPipeline, memoizing the promise
// per path so each file is processed at most once. Synthetic mock paths
// (*.exodus-test-mock.cjs) read the underlying real file.
const loadSourceFileBase = async (filepath) => {
  if (!loadCache.has(filepath)) {
    const load = async () => {
      let contents = await readFile(filepath.replace(cjsMockRegex, ''), 'utf8')
      for (const transform of loadPipeline) contents = await transform(contents, filepath)
      return contents
    }

    const promise = load()
    // FIX: evict failed loads so a transient read/transform error is not
    // cached as a permanently rejected promise for the rest of the process
    promise.catch(() => loadCache.delete(filepath))
    loadCache.set(filepath, promise)
  }

  return loadCache.get(filepath)
}
|
|
171
|
+
|
|
172
|
+
export const build = async (...files) => {
|
|
173
|
+
const envOverride = { FORCE_COLOR: '0', NO_COLOR: '1' }
|
|
174
|
+
const getEnv = (key) => (Object.hasOwn(envOverride, key) ? envOverride[key] : process.env[key]) // We know key is safe as it comes from regex below
|
|
175
|
+
const specificLoadPipeline = [
|
|
176
|
+
(src) => src.replace(/\b(?:process\.env\.([A-Z0-9_]+))\b/gu, (_, x) => stringify(getEnv(x))),
|
|
177
|
+
]
|
|
178
|
+
const loadSourceFile = async (filepath) => {
|
|
179
|
+
let contents = await loadSourceFileBase(filepath)
|
|
180
|
+
for (const transform of specificLoadPipeline) contents = await transform(contents, filepath)
|
|
181
|
+
return contents
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
const input = []
|
|
185
|
+
const importSource = async (file) => input.push(await loadSourceFile(resolveRequire(file)))
|
|
186
|
+
const importFile = (...args) => input.push(`await import(${JSON.stringify(resolve(...args))});`)
|
|
187
|
+
|
|
188
|
+
const nodeApisPlatforms = new Set(['node', 'bun', 'electron'])
|
|
189
|
+
const hasNodeApis = nodeApisPlatforms.has(options.platform) && !process.env.EXODUS_TEST_IS_BROWSER
|
|
190
|
+
if (hasNodeApis && ['node', 'electron'].includes(options.platform)) {
|
|
191
|
+
await importSource('./modules/globals.node.cjs')
|
|
192
|
+
} else {
|
|
193
|
+
if (process.env.EXODUS_TEST_IS_BAREBONE) {
|
|
194
|
+
const entropy = randomBytes(options.entropySize ?? 5 * 1024).toString('base64')
|
|
195
|
+
input.push(`globalThis.EXODUS_TEST_CRYPTO_ENTROPY = ${stringify(entropy)};`)
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
await importSource('./modules/globals.cjs')
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
if (options.jest) {
|
|
202
|
+
const { jestConfig } = options
|
|
203
|
+
const preload = [...(jestConfig.setupFiles || []), ...(jestConfig.setupFilesAfterEnv || [])]
|
|
204
|
+
if (jestConfig.testEnvironment && jestConfig.testEnvironment !== 'node') {
|
|
205
|
+
const { specialEnvironments } = await import('../src/jest.environment.js')
|
|
206
|
+
assert(Object.hasOwn(specialEnvironments, jestConfig.testEnvironment))
|
|
207
|
+
preload.push(...(specialEnvironments[jestConfig.testEnvironment].dependencies || []))
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
if (preload.length === 0) {
|
|
211
|
+
input.push(`globalThis.EXODUS_TEST_PRELOADED = []`)
|
|
212
|
+
} else {
|
|
213
|
+
assert(jestConfig.rootDir)
|
|
214
|
+
const local = createRequire(resolve(jestConfig.rootDir, 'package.json'))
|
|
215
|
+
const w = (f) => `[${stringify(f)}, () => require(${stringify(local.resolve(f))})]`
|
|
216
|
+
input.push(`globalThis.EXODUS_TEST_PRELOADED = [${preload.map((f) => w(f)).join(', ')}]`)
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
await importSource('../bin/jest.js')
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
for (const file of files) importFile(file)
|
|
223
|
+
|
|
224
|
+
const filename = files.length === 1 ? `${files[0]}-${uuid().slice(0, 8)}` : `bundle-${uuid()}`
|
|
225
|
+
const outfile = `${join(options.outdir, filename)}.js`
|
|
226
|
+
const EXODUS_TEST_SNAPSHOTS = await readSnapshots(files, [
|
|
227
|
+
(dir, name) => [dir, `${name}.snapshot`], // node:test
|
|
228
|
+
(dir, name) => [dir, '__snapshots__', `${name}.snap`], // jest
|
|
229
|
+
])
|
|
230
|
+
const EXODUS_TEST_RECORDINGS = await readSnapshots(files, [
|
|
231
|
+
(dir, name) => [dir, '__recordings__', 'fetch', `${name}.json`],
|
|
232
|
+
(dir, name) => [dir, '__recordings__', 'websocket', `${name}.json`],
|
|
233
|
+
])
|
|
234
|
+
const buildWrap = async (opts) => esbuild.build(opts).catch((err) => err)
|
|
235
|
+
let main = input.join(';\n')
|
|
236
|
+
const exit = `EXODUS_TEST_PROCESS.exitCode = 1; EXODUS_TEST_PROCESS._maybeProcessExitCode();`
|
|
237
|
+
if (process.env.EXODUS_TEST_IS_BAREBONE) {
|
|
238
|
+
main = `try {\n${main}\n} catch (err) { print(err); ${exit} }`
|
|
239
|
+
} else if (process.env.EXODUS_TEST_IS_BROWSER) {
|
|
240
|
+
main = `try {\n${main}\n} catch (err) { console.error(err); ${exit} }`
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
const fsfiles = await getPackageFiles(filename ? dirname(resolve(filename)) : process.cwd())
|
|
244
|
+
const fsFilesContents = new Map()
|
|
245
|
+
const fsFilesDirs = new Map()
|
|
246
|
+
const cwd = process.cwd()
|
|
247
|
+
const fixturesRegex = /(fixtures|samples|vectors|wycheproof)/u
|
|
248
|
+
const aggressiveExtensions = /\.(json|txt|hex|wasm)(\.gz)?$/u // These are bundled when just used in path.join and by wildcard from fixtures/
|
|
249
|
+
const fileAllowed = (f) =>
|
|
250
|
+
f && f.startsWith(`${cwd}/`) && resolve(f) === f && /^[a-z0-9@_./-]+$/iu.test(relative(cwd, f))
|
|
251
|
+
|
|
252
|
+
const fsFilesAdd = async (file) => {
|
|
253
|
+
if (!fileAllowed(file)) return
|
|
254
|
+
try {
|
|
255
|
+
const data = await readFile(file, 'base64')
|
|
256
|
+
if (fsFilesContents.has(file)) {
|
|
257
|
+
assert(fsFilesContents.get(file) === data)
|
|
258
|
+
} else {
|
|
259
|
+
fsFilesContents.set(file, data)
|
|
260
|
+
}
|
|
261
|
+
} catch (e) {
|
|
262
|
+
if (e.code !== 'ENOENT') throw e
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
const fixturesSeen = { fs: false, fixtures: false, bundled: false }
|
|
267
|
+
const fsFilesBundleFixtures = async (reason) => {
|
|
268
|
+
if (fixturesSeen.bundled || !filename) return
|
|
269
|
+
if (reason === 'fs' || reason === 'fixtures') fixturesSeen[reason] = true
|
|
270
|
+
if (!fixturesSeen.fs || !fixturesSeen.fixtures) return
|
|
271
|
+
fixturesSeen.bundled = true
|
|
272
|
+
const dir = dirname(resolve(filename))
|
|
273
|
+
for (const name of await readdir(dir, { recursive: true })) {
|
|
274
|
+
const parent = dirname(name)
|
|
275
|
+
if (!fixturesRegex.test(parent)) continue // relative dir path should look like a fixtures dir
|
|
276
|
+
|
|
277
|
+
// Save as directory entry into parent dir
|
|
278
|
+
const subdir = resolve(dir, parent)
|
|
279
|
+
if (fileAllowed(subdir)) {
|
|
280
|
+
if (!fsFilesDirs.has(subdir)) fsFilesDirs.set(subdir, [])
|
|
281
|
+
fsFilesDirs.get(subdir).push(basename(name))
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
// Save to files
|
|
285
|
+
const file = resolve(dir, name)
|
|
286
|
+
if (aggressiveExtensions.test(file)) await fsFilesAdd(file)
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
specificLoadPipeline.push(async (source, filepath) => {
|
|
291
|
+
for (const m of source.matchAll(/readFileSync\(\s*(?:"([^"\\]+)"|'([^'\\]+)')[),]/gu)) {
|
|
292
|
+
await fsFilesAdd(resolve(m[1] || m[2])) // resolves from cwd
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
// E.g. path.join(import.meta.dirname, './fixtures/data.json'), dirname is inlined by loadPipeline already
|
|
296
|
+
const dir = dirname(filepath)
|
|
297
|
+
for (const [, readFileCall, first, secondA, secondB] of source.matchAll(
|
|
298
|
+
/(readFile(?:Sync)?\()?(?:path\.)?join\(\s*("[^"\\]+"),\s*(?:"([^"\\]+)"|'([^'\\]+)')\s*\)/gu
|
|
299
|
+
)) {
|
|
300
|
+
if (first !== JSON.stringify(dir)) continue // only allow files relative to dirname, from loadPipeline
|
|
301
|
+
const file = resolve(dir, secondA || secondB)
|
|
302
|
+
if (readFileCall || aggressiveExtensions.test(file)) await fsFilesAdd(file) // only bundle bare path.join for specific extensions used as test fixtures
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
// Both conditions should happen for deep fixtures inclusion
|
|
306
|
+
if (/(readdir|readFile|exists)Sync/u.test(source)) await fsFilesBundleFixtures('fs')
|
|
307
|
+
if (fixturesRegex.test(source)) await fsFilesBundleFixtures('fixtures')
|
|
308
|
+
|
|
309
|
+
// Resolve require.resolve and bundle those files for fixture or json extensions (e.g. package.json)
|
|
310
|
+
let filepathRequire
|
|
311
|
+
const toAdd = []
|
|
312
|
+
const res = source.replace(
|
|
313
|
+
/\b(require|import\.meta)\.resolve\(\s*(?:"([^"\\]+)"|'([^'\\]+)')\s*\)/gu,
|
|
314
|
+
(orig, cause, a, b) => {
|
|
315
|
+
if (!filepathRequire) filepathRequire = createRequire(filepath)
|
|
316
|
+
try {
|
|
317
|
+
const file = filepathRequire.resolve(a || b)
|
|
318
|
+
if (aggressiveExtensions.test(file)) toAdd.push(file) // load resolved files for specific extensions
|
|
319
|
+
if (cause === 'require') return `(${stringify(file)})`
|
|
320
|
+
// Do not replace import.meta.resolve for non-fixture extensions, might cause misresolutions
|
|
321
|
+
return aggressiveExtensions.test(file) ? `(${stringify(pathToFileURL(file))})` : orig
|
|
322
|
+
} catch {
|
|
323
|
+
return orig
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
)
|
|
327
|
+
|
|
328
|
+
for (const file of toAdd) await fsFilesAdd(file)
|
|
329
|
+
|
|
330
|
+
return res
|
|
331
|
+
})
|
|
332
|
+
|
|
333
|
+
if (files.length === 1) {
|
|
334
|
+
const main = resolve(files[0])
|
|
335
|
+
specificLoadPipeline.push((source, filepath) => {
|
|
336
|
+
return source.replaceAll('(require.main === module)', `(${filepath === main})`)
|
|
337
|
+
})
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
const api = (f) => resolveRequire(`./modules/${f}`)
|
|
341
|
+
const nodeUnprefixed = {
|
|
342
|
+
assert: dirname(dirname(resolveRequire('assert/'))),
|
|
343
|
+
'assert/strict': api('assert-strict.cjs'),
|
|
344
|
+
buffer: hasNodeApis ? api('node-buffer.cjs') : dirname(resolveRequire('buffer/')),
|
|
345
|
+
child_process: api('child_process.cjs'),
|
|
346
|
+
constants: resolveRequire('constants-browserify'),
|
|
347
|
+
cluster: api('cluster.cjs'),
|
|
348
|
+
events: dirname(resolveRequire('events/')),
|
|
349
|
+
fs: api('fs.cjs'),
|
|
350
|
+
'fs/promises': api('fs-promises.cjs'),
|
|
351
|
+
http: api('http.cjs'),
|
|
352
|
+
https: api('https.cjs'),
|
|
353
|
+
module: api('module.cjs'),
|
|
354
|
+
os: resolveRequire('os-browserify/browser.js'), // 'main' entry point is noop, we want browser entry
|
|
355
|
+
path: resolveRequire('path-browserify'),
|
|
356
|
+
querystring: resolveRequire('querystring-es3'),
|
|
357
|
+
stream: resolveRequire('stream-browserify'),
|
|
358
|
+
timers: resolveRequire('timers-browserify'),
|
|
359
|
+
tty: api('tty.cjs'),
|
|
360
|
+
url: api('url.cjs'),
|
|
361
|
+
util: api('util.cjs'),
|
|
362
|
+
zlib: resolveRequire('browserify-zlib'),
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
try {
|
|
366
|
+
if (require.resolve('crypto-browserify')) nodeUnprefixed.crypto = api('crypto.cjs')
|
|
367
|
+
} catch {}
|
|
368
|
+
|
|
369
|
+
const config = {
|
|
370
|
+
logLevel: 'silent',
|
|
371
|
+
stdin: {
|
|
372
|
+
contents: `(async function () {\n${main}\n})()`,
|
|
373
|
+
resolveDir: dirname(fileURLToPath(import.meta.url)),
|
|
374
|
+
},
|
|
375
|
+
bundle: true,
|
|
376
|
+
outdir: options.outdir,
|
|
377
|
+
entryNames: filename,
|
|
378
|
+
platform: 'browser', // Need to follow "browser" field of package.json
|
|
379
|
+
// conditions: ['browser', 'react-native'], // TODO
|
|
380
|
+
mainFields: ['browser', 'module', 'main'],
|
|
381
|
+
define: {
|
|
382
|
+
'process.browser': stringify(true),
|
|
383
|
+
'process.emitWarning': 'undefined',
|
|
384
|
+
'process.stderr': 'undefined',
|
|
385
|
+
'process.stdout': 'undefined',
|
|
386
|
+
'process.type': 'undefined',
|
|
387
|
+
'process.platform': 'undefined',
|
|
388
|
+
'process.version': stringify('v22.15.0'), // shouldn't depend on currently used Node.js version
|
|
389
|
+
'process.versions.node': stringify('22.15.0'), // see line above
|
|
390
|
+
EXODUS_TEST_PROCESS_CWD: stringify(process.cwd()),
|
|
391
|
+
EXODUS_TEST_FILES: stringify(files.map((f) => [dirname(f), basename(f)])),
|
|
392
|
+
EXODUS_TEST_SNAPSHOTS: stringify(EXODUS_TEST_SNAPSHOTS),
|
|
393
|
+
EXODUS_TEST_RECORDINGS: stringify(EXODUS_TEST_RECORDINGS),
|
|
394
|
+
EXODUS_TEST_FSFILES: stringify(fsfiles), // TODO: can we safely use relative paths?
|
|
395
|
+
EXODUS_TEST_FSFILES_CONTENTS: stringify([...fsFilesContents.entries()]),
|
|
396
|
+
EXODUS_TEST_FSDIRS: stringify([...fsFilesDirs.entries()]),
|
|
397
|
+
},
|
|
398
|
+
alias: {
|
|
399
|
+
// Jest, tape and node:test
|
|
400
|
+
'@jest/globals': resolveImport('../src/jest.js'),
|
|
401
|
+
tape: resolveImport('../src/tape.cjs'),
|
|
402
|
+
'tape-promise/tape': resolveImport('../src/tape.cjs'),
|
|
403
|
+
'node:test': resolveImport('../src/node.js'),
|
|
404
|
+
'micro-should': resolveImport('../src/jest.js'),
|
|
405
|
+
// For cross-dir usage
|
|
406
|
+
'@exodus/test/jest': resolveImport('../src/jest.js'),
|
|
407
|
+
'@exodus/test/tape': resolveImport('../src/tape.cjs'),
|
|
408
|
+
'@exodus/test/node': resolveImport('../src/node.js'),
|
|
409
|
+
// Inner
|
|
410
|
+
'exodus-test:text-encoding-utf': api('text-encoding-utf.cjs'),
|
|
411
|
+
'exodus-test:util-format': api('util-format.cjs'),
|
|
412
|
+
// Node.js (except node:test)
|
|
413
|
+
...Object.fromEntries(Object.entries(nodeUnprefixed).map(([k, v]) => [`node:${k}`, v])),
|
|
414
|
+
...nodeUnprefixed,
|
|
415
|
+
// Needed for polyfills but name conflicts with Node.js modules
|
|
416
|
+
'url/url.js': resolveRequire('url/url.js'),
|
|
417
|
+
'util/util.js': resolveRequire('util/util.js'),
|
|
418
|
+
// expect-related deps
|
|
419
|
+
'ansi-styles': api('ansi-styles.cjs'),
|
|
420
|
+
'jest-util': api('jest-util.js'),
|
|
421
|
+
'jest-message-util': api('jest-message-util.js'),
|
|
422
|
+
// unwanted deps
|
|
423
|
+
bindings: api('empty/function-throw.cjs'),
|
|
424
|
+
'node-gyp-build': api('empty/function-throw.cjs'),
|
|
425
|
+
ws: api('ws.cjs'),
|
|
426
|
+
},
|
|
427
|
+
sourcemap:
|
|
428
|
+
options.platform === 'xs' ? false : process.env.EXODUS_TEST_IS_BAREBONE ? 'inline' : 'linked', // FIXME?
|
|
429
|
+
sourcesContent: false,
|
|
430
|
+
keepNames: true,
|
|
431
|
+
format: 'iife',
|
|
432
|
+
target: options.target || `node${process.versions.node}`,
|
|
433
|
+
supported: {
|
|
434
|
+
bigint: true,
|
|
435
|
+
...(options.platform === 'hermes' ? hermesSupported : {}),
|
|
436
|
+
},
|
|
437
|
+
plugins: [
|
|
438
|
+
{
|
|
439
|
+
name: 'exodus-test.bundle',
|
|
440
|
+
setup({ onResolve, onLoad }) {
|
|
441
|
+
onResolve({ filter: /\.[cm]?[jt]sx?$/ }, (args) => {
|
|
442
|
+
if (shouldInstallMocks && cjsMockRegex.test(args.path)) {
|
|
443
|
+
return { path: args.path, namespace: 'file' }
|
|
444
|
+
}
|
|
445
|
+
})
|
|
446
|
+
onLoad({ filter: /\.[cm]?[jt]sx?$/, namespace: 'file' }, async (args) => {
|
|
447
|
+
let filepath = args.path
|
|
448
|
+
// Resolve .native versions
|
|
449
|
+
// TODO: maybe follow package.json for this
|
|
450
|
+
if (process.env.EXODUS_TEST_IS_BAREBONE) {
|
|
451
|
+
const maybeNative = filepath.replace(/(\.[cm]?[jt]sx?)$/u, '.native$1')
|
|
452
|
+
if (existsSync(maybeNative)) filepath = maybeNative
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
const loader = extname(filepath).replace(/^\.[cm]?/, '') // TODO: a flag to force jsx/tsx perhaps
|
|
456
|
+
assert(['js', 'ts', 'jsx', 'tsx'].includes(loader))
|
|
457
|
+
|
|
458
|
+
return { contents: await loadSourceFile(filepath), loader }
|
|
459
|
+
})
|
|
460
|
+
},
|
|
461
|
+
},
|
|
462
|
+
],
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
let shouldInstallMocks = false
|
|
466
|
+
const mocked = new Set()
|
|
467
|
+
specificLoadPipeline.push(async (source, filepath) => {
|
|
468
|
+
if (shouldInstallMocks) {
|
|
469
|
+
if (cjsMockRegex.test(filepath)) return cjsMockFallback
|
|
470
|
+
if (mocked.has(filepath) && !filepath.endsWith('.cjs') && /^export\b/mu.test(source)) {
|
|
471
|
+
const mock = stringify(`${filepath}.exodus-test-mock.cjs`)
|
|
472
|
+
const def = 'x.__esModule ? x.default : (x.default ?? x)'
|
|
473
|
+
return `export * from ${mock}\nvar x = require(${mock})\nexport default ${def}`
|
|
474
|
+
}
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
// 'await import' is replaced only in files with mocks (likely toplevel there)
|
|
478
|
+
// Otherwise we don't patch module system at all
|
|
479
|
+
if (!source.includes('jest.doMock(') && !source.includes('jest.mock(')) return source
|
|
480
|
+
shouldInstallMocks = true
|
|
481
|
+
const filepathRequire = createRequire(filepath)
|
|
482
|
+
return source
|
|
483
|
+
.replaceAll(/\bawait (import\((?:"[^"\\]+"|'[^'\\]+')\))/gu, 'EXODUS_TEST_SYNC_IMPORT($1)')
|
|
484
|
+
.replaceAll(
|
|
485
|
+
/\bjest\.(doMock|mock|requireActual|requireMock)\(\s*("[^"\\]+"|'[^'\\]+')/gu,
|
|
486
|
+
(_, method, raw) => {
|
|
487
|
+
try {
|
|
488
|
+
const arg = JSON.parse(raw[0] === "'" ? raw.replaceAll("'", '"') : raw) // fine because it doesn't have quotes or \
|
|
489
|
+
const { alias } = config
|
|
490
|
+
const file = Object.hasOwn(alias, arg) ? alias[arg] : filepathRequire.resolve(arg) // throws when not resolved
|
|
491
|
+
assert(existsSync(file), `File ${file} does not exist`)
|
|
492
|
+
const builtin = stringify(Object.hasOwn(alias, arg) ? arg.replace(/^node:/, '') : null)
|
|
493
|
+
const id = `bundle:${relative(cwd, file)}`
|
|
494
|
+
if (method.startsWith('require')) return `jest.${method}(${stringify(id)}`
|
|
495
|
+
mocked.add(file)
|
|
496
|
+
return `jest.__${method}Bundle(${stringify(id)},${builtin},()=>require(${raw})`
|
|
497
|
+
} catch (err) {
|
|
498
|
+
console.error(err)
|
|
499
|
+
throw new Error(`Failed to mock ${raw}: not resolved`, { cause: err })
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
)
|
|
503
|
+
})
|
|
504
|
+
|
|
505
|
+
if (files.length === 1) {
|
|
506
|
+
config.define['process.argv'] = stringify(['exodus-test', resolve(files[0])])
|
|
507
|
+
}
|
|
508
|
+
|
|
509
|
+
if (options.platform !== 'bun') config.define['process.versions.bun'] = 'undefined'
|
|
510
|
+
if (options.platform !== 'deno') config.define['process.versions.deno'] = 'undefined'
|
|
511
|
+
if (options.platform !== 'electron') config.define['process.versions.electron'] = 'undefined'
|
|
512
|
+
if (!hasNodeApis) {
|
|
513
|
+
config.define['process.cwd'] = 'EXODUS_TEST_PROCESS.cwd'
|
|
514
|
+
config.define['process.exit'] = 'EXODUS_TEST_PROCESS.exit'
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
let res = await buildWrap(config)
|
|
518
|
+
assert.equal(res instanceof Error, res.errors.length > 0)
|
|
519
|
+
|
|
520
|
+
if (fsFilesContents.size > 0 || fsFilesDirs.size > 0) {
|
|
521
|
+
// re-run as we detected that tests depend on fsReadFileSync contents
|
|
522
|
+
config.define.EXODUS_TEST_FSFILES_CONTENTS = stringify([...fsFilesContents.entries()])
|
|
523
|
+
config.define.EXODUS_TEST_FSDIRS = stringify([...fsFilesDirs.entries()])
|
|
524
|
+
res = await buildWrap(config)
|
|
525
|
+
assert.equal(res instanceof Error, res.errors.length > 0)
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
if (res.errors.length === 0 && shouldInstallMocks) {
|
|
529
|
+
const code = await readFile(outfile, 'utf8')
|
|
530
|
+
const heads = {
|
|
531
|
+
esm: /(var __esm = (?:function)?\((fn[\d]*), res[\d]*\)\s*(?:=>|\{\s*return)\s*)(function __init[\d]*\(\) \{)/u,
|
|
532
|
+
cjs: /(var __commonJS = (?:function)?\((cb[\d]*), mod[\d]*\)\s*(?:=>|\{\s*return)\s*)(function __require[\d]*\(\) \{)/u,
|
|
533
|
+
}
|
|
534
|
+
const k = '__getOwnPropNames($2)[0]'
|
|
535
|
+
const mock = (p, l, v) =>
|
|
536
|
+
`var ${p}=new Set(),${l}=new Set(),${v}=new Map();$1${p}.add(${k}) && $3;{const k=${k};${l}.add(k);if (${v}.has(k))return ${v}.get(k)};`
|
|
537
|
+
assert(heads.esm.test(code) && heads.cjs.test(code), 'Failed to match for module mocks')
|
|
538
|
+
const patched = code
|
|
539
|
+
.replace(heads.esm, mock('__mocksESMPossible', '__mocksESMLoaded', '__mocksESM')) // __mocksESM actually doesn't work
|
|
540
|
+
.replace(heads.cjs, mock('__mocksCJSPossible', '__mocksCJSLoaded', '__mocksCJS'))
|
|
541
|
+
.replaceAll('EXODUS_TEST_SYNC_IMPORT(Promise.resolve().then(', '((f=>f())(')
|
|
542
|
+
assert(!patched.includes('EXODUS_TEST_SYNC_IMPORT'), "Failed to fix 'await import'")
|
|
543
|
+
await writeFile(outfile, patched)
|
|
544
|
+
}
|
|
545
|
+
|
|
546
|
+
// if (res.errors.length === 0) require('fs').copyFileSync(outfile, 'tempout.cjs') // DEBUG
|
|
547
|
+
|
|
548
|
+
// We treat warnings as errors, so just merge all them
|
|
549
|
+
const errors = []
|
|
550
|
+
const formatOpts = { color: process.stdout.hasColors?.(), terminalWidth: process.stdout.columns }
|
|
551
|
+
const formatMessages = (list, kind) => esbuild.formatMessages(list, { kind, ...formatOpts })
|
|
552
|
+
if (res.warnings.length > 0) errors.push(...(await formatMessages(res.warnings, 'warning')))
|
|
553
|
+
if (res.errors.length > 0) errors.push(...(await formatMessages(res.errors, 'error')))
|
|
554
|
+
return { file: outfile, errors }
|
|
555
|
+
}
|