spark-chamber 0.1.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +19 -0
- package/bin/spark.cjs +50 -0
- package/lib/args.js +161 -0
- package/lib/config.js +13 -0
- package/lib/coverage.js +65 -0
- package/lib/force-exit.cjs +10 -0
- package/lib/init.js +36 -0
- package/lib/log.js +30 -0
- package/lib/main.js +97 -0
- package/lib/preload.cjs +26 -0
- package/lib/runner.js +115 -0
- package/lib/spark.js +132 -0
- package/lib/stream.js +92 -0
- package/lib/ui.cjs +52 -0
- package/package.json +59 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
Copyright (c) 2026 Sylvester Keil
|
|
2
|
+
|
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
4
|
+
this software and associated documentation files (the "Software"), to deal in
|
|
5
|
+
the Software without restriction, including without limitation the rights to
|
|
6
|
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
7
|
+
the Software, and to permit persons to whom the Software is furnished to do so,
|
|
8
|
+
subject to the following conditions:
|
|
9
|
+
|
|
10
|
+
The above copyright notice and this permission notice (including the next
|
|
11
|
+
paragraph) shall be included in all copies or substantial portions of the
|
|
12
|
+
Software.
|
|
13
|
+
|
|
14
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
15
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
16
|
+
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
17
|
+
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
18
|
+
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
19
|
+
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
package/bin/spark.cjs
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
'use strict'
|
|
4
|
+
|
|
5
|
+
const console = require('node:console')
|
|
6
|
+
const process = require('node:process')
|
|
7
|
+
const { join } = require('node:path')
|
|
8
|
+
const { spawn } = require('node:child_process')
|
|
9
|
+
|
|
10
|
+
// Best-effort module resolution: return whatever `resolver` yields
// for `module`, or null when resolution throws (e.g. not installed).
function resolve (module, resolver = require) {
  let resolved = null
  try {
    resolved = resolver(module)
  } catch {
    // Swallow the lookup error — the caller decides how to proceed.
  }
  return resolved
}
|
|
17
|
+
|
|
18
|
+
// Launch Electron with this package's root directory as the app entry,
// forwarding any extra CLI arguments. `electron` is the path to the
// Electron binary (from $ELECTRON_PATH or the `electron` npm package);
// exits with an error message when neither is available.
function run (electron) {
  if (!electron) {
    console.error(`
Cannot find 'electron' and $ELECTRON_PATH is not set.
Either set $ELECTRON_PATH or run 'npm install electron'.`)

    process.exit(1)
  } else {
    // Electron loads the package directory (one level up from bin/)
    // as the app; the remaining CLI arguments pass straight through.
    let args = [
      join(__dirname, '..'),
      ...process.argv.slice(2)
    ]

    let child = spawn(electron, args)

    child.stdout.pipe(process.stdout)
    child.stderr.pipe(process.stderr)

    // Mirror the child's exit status: re-raise a fatal signal on this
    // process, otherwise exit with the child's exit code.
    child.on('exit', (code, signal) => {
      if (signal)
        process.kill(process.pid, signal)
      else
        process.exit(code)
    })

    // Forward Ctrl-C to the child; SIGTERM follows immediately as a
    // fallback in case the child ignores SIGINT.
    process.on('SIGINT', () => {
      child.kill('SIGINT')
      child.kill('SIGTERM')
    })
  }
}

// NOTE(review): `require('electron')` outside an Electron runtime
// resolves to the binary path exported by the electron package —
// confirm this is the intended default resolution.
run(process.env.ELECTRON_PATH || resolve('electron'))
|
package/lib/args.js
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { existsSync } from 'node:fs'
|
|
2
|
+
import * as reporters from 'node:test/reporters'
|
|
3
|
+
import { parseArgs } from 'node:util'
|
|
4
|
+
import config, { configure } from './config.js'
|
|
5
|
+
|
|
6
|
+
// CLI option definitions consumed by node:util's parseArgs (see
// parse() below). Keys use the raw flag spelling; parse() renames
// several of them to camelCase runner options afterwards.
export const options = {
  help: {
    type: 'boolean',
    short: 'h'
  },
  version: {
    type: 'boolean',
    short: 'v'
  },
  verbose: {
    type: 'boolean',
    short: 'V',
    default: config.verbose
  },
  // Maximum number of test files to run concurrently.
  concurrency: {
    type: 'string',
    short: 'c'
  },
  // Regex filters for which tests to run (renamed to testNamePatterns).
  'name-pattern': {
    type: 'string',
    short: 'g',
    multiple: true
  },
  // Run only tests marked { only: true }.
  only: {
    type: 'boolean'
  },
  // State file path for rerunning failed tests.
  'rerun-failures': {
    type: 'string'
  },
  // Glob patterns for renderer-process test files.
  renderer: {
    type: 'string',
    short: 'r',
    multiple: true
  },
  // Regex filters for which tests to skip (renamed to testSkipPatterns).
  'skip-pattern': {
    type: 'string',
    short: 'x',
    multiple: true
  },
  // Test isolation mode: 'none' or 'process'.
  isolation: {
    type: 'string',
    short: 'i'
  },
  // Per-test timeout in milliseconds.
  timeout: {
    type: 'string',
    short: 't'
  },
  // Module providing globalSetup/globalTeardown hooks.
  'global-setup': {
    type: 'string',
    short: 'S'
  },
  // Test interface to expose globally: 'bdd' or 'tdd'.
  ui: {
    type: 'string'
  },
  // Page URL loaded into renderer test windows.
  url: {
    type: 'string',
    default: config.url
  },
  // Reporter names or paths; each needs a matching destination.
  reporter: {
    type: 'string',
    short: 'R',
    multiple: true
  },
  // Output destination per reporter: 'stdout', 'stderr', or a path.
  destination: {
    type: 'string',
    short: 'O',
    multiple: true
  },
}
|
|
75
|
+
|
|
76
|
+
// Parse CLI arguments into a normalized options object, apply the
// shared configuration, and return the values.
//
// args – argument array; defaults to process.argv inside parseArgs.
// Returns the parsed values with kebab-case flags renamed to the
// camelCase names the runner expects. Throws on invalid numeric
// options and on reporter/destination mismatches.
export function parse (args) {
  let { values, positionals, tokens } = parseArgs({
    args,
    options,
    tokens: true,
    allowPositionals: true,
    strict: false
  })

  // Subtle: assuming these are Electron/Chromium switches!
  values.switches = tokens
    .filter(t => t.kind === 'option' && !(t.name in options))
    .map(t => t.rawName)

  if (positionals.length)
    values.main = positionals

  if (values.concurrency) {
    values.concurrency = Number(values.concurrency)
    // Fail fast on e.g. '-c two' instead of propagating NaN to the runner.
    if (!Number.isInteger(values.concurrency) || values.concurrency < 1)
      throw new Error('--concurrency must be a positive integer')
  }

  if (values.timeout) {
    values.timeout = Number(values.timeout)
    if (!Number.isFinite(values.timeout) || values.timeout < 0)
      throw new Error('--timeout must be a non-negative number of milliseconds')
  }

  if (values['name-pattern']) {
    values.testNamePatterns = values['name-pattern']
    delete values['name-pattern']
  }

  if (values['skip-pattern']) {
    values.testSkipPatterns = values['skip-pattern']
    delete values['skip-pattern']
  }

  if (values['rerun-failures']) {
    values.rerunFailuresFilePath = values['rerun-failures']
    delete values['rerun-failures']
  }

  if (values['global-setup']) {
    values.globalSetupPath = values['global-setup']
    delete values['global-setup']
  }

  // Default to the spec reporter on stdout.
  values.reporter = values.reporter ?? ['spec']
  values.destination = values.destination ?? ['stdout']

  validateReporters(values.reporter, values.destination)
  configure(values)

  return values
}
|
|
127
|
+
|
|
128
|
+
// A reporter is valid when it is built into node:test/reporters or
// resolves to an existing file; each reporter needs its own output
// destination, so the two lists must be the same length.
function validateReporters (names, destinations) {
  if (destinations.length !== names.length)
    throw new Error('Each --reporter must have a matching --destination')

  let unknown = names.find(
    (name) => !(name in reporters) && !existsSync(name)
  )
  if (unknown != null)
    throw new Error(`Unknown reporter: ${unknown}`)
}
|
|
137
|
+
|
|
138
|
+
export const usage = () => `Usage: spark [options] [files...]
|
|
139
|
+
|
|
140
|
+
Arguments:
|
|
141
|
+
files main process test files (glob patterns)
|
|
142
|
+
|
|
143
|
+
Options:
|
|
144
|
+
-r, --renderer <files> renderer test files (repeatable, glob)
|
|
145
|
+
-g, --name-pattern <regex> run matching tests (repeatable)
|
|
146
|
+
-x, --skip-pattern <regex> skip matching tests (repeatable)
|
|
147
|
+
-c, --concurrency <n> max concurrent test files
|
|
148
|
+
-i, --isolation <mode> test isolation (none, process)
|
|
149
|
+
-t, --timeout <ms> test timeout in ms
|
|
150
|
+
--ui <name> test interface (bdd, tdd)
|
|
151
|
+
--url <url> renderer page URL
|
|
152
|
+
--only run tests with { only: true }
|
|
153
|
+
-S, --global-setup <path> module to run before tests
|
|
154
|
+
--rerun-failures <path> rerun failures state file
|
|
155
|
+
-R, --reporter <name> reporter name or path (repeatable)
|
|
156
|
+
-O, --destination <path> output per reporter (repeatable)
|
|
157
|
+
--no-sandbox disable Chromium sandbox
|
|
158
|
+
-V, --verbose verbose output
|
|
159
|
+
-h, --help show this help
|
|
160
|
+
-v, --version show version
|
|
161
|
+
`
|
package/lib/config.js
ADDED
package/lib/coverage.js
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
// Start precise V8 code coverage collection in the current process via
// the inspector protocol. Returns an async stop function that collects
// the results, tears the session down, and forwards the coverage
// report to `out` (an object-mode stream; may be null/undefined).
export async function startInspector (out) {
  // Lazy import: only pay for the inspector when coverage is on.
  let { Session } = await import('node:inspector/promises')
  let session = new Session()
  session.connect()
  await session.post('Profiler.enable')
  await session.post('Profiler.startPreciseCoverage', {
    callCount: true,
    detailed: true
  })

  return async () => {
    let { result } = await session.post('Profiler.takePreciseCoverage')
    await session.post('Profiler.stopPreciseCoverage')
    await session.post('Profiler.disable')
    session.disconnect()
    return writeCoverage(out, result)
  }
}
|
|
19
|
+
|
|
20
|
+
// Start precise coverage collection for an Electron renderer via the
// webContents debugger (Chrome DevTools protocol). Mirrors
// startInspector: returns an async stop function that collects the
// results, detaches, and forwards the report to `out`.
export async function startDebugger (out, webContents) {
  let dbg = webContents.debugger
  dbg.attach()
  await dbg.sendCommand('Profiler.enable')
  await dbg.sendCommand('Profiler.startPreciseCoverage', {
    callCount: true,
    detailed: true
  })

  return async () => {
    let { result } = await dbg.sendCommand('Profiler.takePreciseCoverage')
    await dbg.sendCommand('Profiler.stopPreciseCoverage')
    await dbg.sendCommand('Profiler.disable')
    dbg.detach()
    return writeCoverage(out, result)
  }
}
|
|
37
|
+
|
|
38
|
+
// Convert a raw V8 precise-coverage result into a test:coverage event.
// Only file:// script entries with at least one named function are
// kept. The event is written to `out` (if given) and returned.
function writeCoverage (out, result) {
  let files = []

  for (let entry of result) {
    if (!entry.url.startsWith('file://')) continue

    let functions = entry.functions
      .filter((fn) => fn.functionName)
      .map((fn) => ({
        name: fn.functionName,
        // NOTE(review): assumes at least one range per function —
        // V8 appears to always emit one; confirm.
        count: fn.ranges[0].count
      }))

    if (functions.length)
      files.push({ path: new URL(entry.url).pathname, functions })
  }

  let data = { nesting: 0, summary: { files } }
  out?.write({ type: 'test:coverage', data })
  return data
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
// Preloaded (via --require) into child test processes spawned with
// isolation: 'process'.
const process = require('node:process')
const { after } = require('node:test')

// Because Electron's event loop does not drain,
// tests spawned with isolation: 'process' never exit.
// The after hook runs when all tests complete via root.run(),
// giving us a chance to force exit.
after(() => {
  // setImmediate lets final test events flush before exiting.
  setImmediate(() => process.exit())
})
|
package/lib/init.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { mkdtempSync } from 'node:fs'
|
|
2
|
+
import { tmpdir } from 'node:os'
|
|
3
|
+
import { join } from 'node:path'
|
|
4
|
+
import { app, protocol } from 'electron'
|
|
5
|
+
|
|
6
|
+
// Register the custom spark:// scheme as standard and secure.
// Must be called before the app 'ready' event fires.
export function registerScheme () {
  protocol.registerSchemesAsPrivileged([{
    scheme: 'spark',
    privileges: { standard: true, secure: true }
  }])
}
|
|
12
|
+
|
|
13
|
+
// Serve a minimal empty HTML document for spark:// URLs, unless the
// host application has already installed its own handler.
export function handleScheme () {
  if (!protocol.isProtocolHandled('spark')) {
    protocol.handle('spark', () => new Response('<html></html>', {
      headers: { 'content-type': 'text/html' }
    }))
  }
}
|
|
20
|
+
|
|
21
|
+
// Redirect Electron's writable data directories into a fresh, unique
// temp directory so test runs are hermetic and never touch the user's
// real application data.
export function setupPaths () {
  let tmp = mkdtempSync(join(tmpdir(), `${app.getName()}-`))

  app.setPath('appData', tmp)
  app.setPath('cache', join(tmp, 'cache'))
  app.setPath('logs', join(tmp, 'logs'))
  app.setPath('crashDumps', join(tmp, 'crashDumps'))
  // NOTE(review): the temp directory is never cleaned up here —
  // presumably left to the OS; confirm this is intended.
}
|
|
29
|
+
|
|
30
|
+
// Module side effects: prepare the Electron app before 'ready'.
if (!app.isReady()) {
  // Only redirect data paths when running as the bare spark app,
  // not when embedded inside a host application.
  if (app.getName() === 'spark')
    setupPaths()

  registerScheme()
  app.whenReady().then(handleScheme)
}
|
package/lib/log.js
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { styleText } from 'node:util'
|
|
2
|
+
import config from './config.js'
|
|
3
|
+
|
|
4
|
+
// Minimal leveled logger. The default export dispatches by level name
// to the matching named export (debug/info/warn/error); unknown levels
// are silently ignored, a missing level falls back to 'info'.
export default function log (level, ...args) {
  let handler = log[level ?? 'info']
  handler?.(...args)
}

// Verbose-only output (gray, to stderr); no-op unless config.verbose.
export function debug (...args) {
  if (!config.verbose) return
  let [first, ...rest] = args
  console.error(styleText('gray', first), ...rest)
}

export function info (...args) {
  console.log(...args)
}

// Yellow-tagged warning on stderr (console.warn).
export function warn (...args) {
  let [message, ...rest] = args
  console.warn(styleText('yellow', `[WARN]: ${message}`), ...rest)
}

// Red-tagged error output; Error instances also get their stack
// printed in gray.
export function error (err, ...args) {
  if (err instanceof Error) {
    console.error(styleText('red', `[ERROR]: ${err.message}`), ...args)
    console.error(styleText('gray', err.stack))
    return
  }
  console.error(styleText('red', `[ERROR]: ${err}`), ...args)
}

// Make the named level handlers reachable from the default export.
Object.assign(log, { debug, info, warn, error })
|
package/lib/main.js
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import { createWriteStream } from 'node:fs'
|
|
2
|
+
import { resolve } from 'node:path'
|
|
3
|
+
import process from 'node:process'
|
|
4
|
+
import { pathToFileURL } from 'node:url'
|
|
5
|
+
import * as reporters from 'node:test/reporters'
|
|
6
|
+
import { app } from 'electron'
|
|
7
|
+
import './init.js'
|
|
8
|
+
import log, { debug, info, error } from './log.js'
|
|
9
|
+
import { parse, usage } from './args.js'
|
|
10
|
+
import { run } from './spark.js'
|
|
11
|
+
import { report, tap } from './stream.js'
|
|
12
|
+
|
|
13
|
+
// Entry point for the Electron main process: parse the CLI, honor
// --help/--version, run global setup/teardown around the tests, and
// exit with the number of failed tests (0 on success).
try {
  let opts = parse()

  if (opts.help)
    quit(usage())
  if (opts.version)
    quit(`${app.getVersion()} (${process.versions.electron})`)

  debug(`Using: ${app.getPath('appData')}`)

  // Unset so spawned child test processes don't inherit the
  // test-context marker from this process.
  delete process.env.NODE_TEST_CONTEXT
  let setup = maybeImport(opts.globalSetupPath)

  app
    .on('window-all-closed', () => {}) // keep the app alive without windows
    .whenReady()
    .then(() => setup)
    .then(async ({ globalSetup, globalTeardown }) => {
      await globalSetup?.()
      try {
        // `await` is required: with a bare `return`, the finally
        // block (and thus globalTeardown) would run before the test
        // run actually completes.
        return await runTests(opts)
      } finally {
        await globalTeardown?.()
      }
    })
    .then(([summary]) => {
      // Exit code mirrors the number of failed tests (0 = success).
      app.exit(summary.counts.failed)
    })
    .catch(fail)

} catch (e) {
  fail(e)
}
|
|
46
|
+
|
|
47
|
+
// Kick off the test run, wiring renderer console messages into the
// logger, then fan the resulting event stream out to all resolved
// reporter/destination pairs while tapping the final summaries.
async function runTests ({ reporter, destination, ...opts }) {
  let source = run({
    ...opts,
    onConsole: (e) => log(e.level, e.message)
  })

  let pairs = await resolveReporters(reporter, destination)
  return report(source, pairs, tap('test:summary'))
}
|
|
63
|
+
|
|
64
|
+
// Import `moduleName` (resolved relative to cwd) when given; resolve
// to an empty object otherwise.
function maybeImport (moduleName) {
  if (!moduleName) return Promise.resolve({})
  return import(pathToFileURL(resolve(moduleName)))
}
|
|
69
|
+
|
|
70
|
+
// Resolve each reporter name together with its positional destination
// into [reporter, destination] pairs, in parallel.
function resolveReporters (names, destinations) {
  return Promise.all(
    names.map(async (name, i) => {
      let reporter = await resolveReporter(name)
      let dest = resolveDestination(destinations[i])
      return [reporter, dest]
    })
  )
}
|
|
76
|
+
|
|
77
|
+
// Resolve a built-in node:test reporter by name, or dynamically import
// a custom reporter module's default export from a file path.
function resolveReporter (name) {
  if (name in reporters) return reporters[name]
  return import(pathToFileURL(resolve(name))).then((m) => m.default)
}
|
|
82
|
+
|
|
83
|
+
// Map a destination name to a writable stream: the process streams
// for 'stdout'/'stderr', otherwise a file write stream at that path.
function resolveDestination (dest) {
  switch (dest) {
    case 'stdout':
      return process.stdout
    case 'stderr':
      return process.stderr
    default:
      return createWriteStream(dest)
  }
}
|
|
88
|
+
|
|
89
|
+
// Print an informational message and exit successfully.
function quit (reason) {
  info(reason)
  app.exit(0)
}
|
|
93
|
+
|
|
94
|
+
// Report an error and exit with a generic failure code.
function fail (err) {
  error(err)
  app.exit(1)
}
|
package/lib/preload.cjs
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
// Preload script for renderer test windows: receives its options via
// an --spark=<json> process argument and bridges test events from the
// in-renderer test runner to the main process over IPC.
const { argv } = require('node:process')
const { ipcRenderer } = require('electron')
const { createTestRunner } = require('./runner.js')

window.ELECTRON_DISABLE_SECURITY_WARNINGS = true

// slice(8) drops the '--spark=' prefix (8 characters).
let opts = JSON.parse(
  argv.find(a => a.startsWith('--spark=')).slice(8)
)

// Tell the main process the preload is in place.
ipcRenderer.send('spark:ready')

// The main process signals the actual start once coverage collection
// (if any) is set up; see testRenderer in spark.js.
ipcRenderer.on('spark:start', () => {
  createTestRunner({
    ...opts,
    setup (stream) {
      stream
        .on('data', (event) => {
          // Forward every test event to the main process.
          ipcRenderer.send('spark:event', event)
        })
        .on('end', () => {
          ipcRenderer.send('spark:done')
        })
    }
  })
})
|
package/lib/runner.js
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import assert from 'node:assert/strict'
|
|
2
|
+
import { join } from 'node:path'
|
|
3
|
+
import process from 'node:process'
|
|
4
|
+
import test from 'node:test'
|
|
5
|
+
import { expose } from './ui.cjs'
|
|
6
|
+
|
|
7
|
+
const FORCE_EXIT = join(import.meta.dirname, 'force-exit.cjs')
|
|
8
|
+
const UI = join(import.meta.dirname, 'ui.cjs')
|
|
9
|
+
|
|
10
|
+
// Electron doesn't parse Node test flags from argv.
|
|
11
|
+
// Build NODE_OPTIONS to forward them to child processes.
|
|
12
|
+
// Electron doesn't parse Node test flags from argv, so build a
// NODE_OPTIONS string to forward filters and timeouts to spawned
// child test processes.
// NOTE(review): patterns containing spaces would split when
// NODE_OPTIONS is tokenized — confirm callers never pass those.
function nodeOptions ({
  only,
  testNamePatterns,
  testSkipPatterns,
  timeout
}) {
  let flags = []

  for (let pattern of testNamePatterns ?? [])
    flags.push(`--test-name-pattern=${pattern}`)

  for (let pattern of testSkipPatterns ?? [])
    flags.push(`--test-skip-pattern=${pattern}`)

  if (only)
    flags.push('--test-only')

  if (timeout != null)
    flags.push(`--test-timeout=${timeout}`)

  return flags.join(' ')
}
|
|
36
|
+
|
|
37
|
+
// Tests with isolation: 'none' do not terminate
|
|
38
|
+
// because they wait for the beforeExit event.
|
|
39
|
+
// See https://github.com/nodejs/node/issues/57234
|
|
40
|
+
// Tests with isolation: 'none' do not terminate because they wait for
// the beforeExit event. Track in-flight tests on the event stream and
// emit a synthetic beforeExit once the last one completes.
// See https://github.com/nodejs/node/issues/57234
function emitBeforeExit (stream) {
  let inFlight = 0

  stream.on('test:enqueue', () => {
    inFlight += 1
  })
  stream.on('test:complete', () => {
    inFlight -= 1
    if (inFlight === 0)
      setImmediate(() => process.emit('beforeExit'))
  })
}
|
|
52
|
+
|
|
53
|
+
// Configure and start a node:test run in the current process.
//
// isolation 'none' runs tests in-process (required in renderers);
// isolation 'process' spawns child processes, preloading force-exit
// and forwarding test filters via NODE_OPTIONS. `setup` is invoked
// with the runner's event stream before events start flowing.
// Returns the node:test run() event stream.
export function createTestRunner ({
  coverage = false,
  env = process.env,
  execArgv = [],
  isolation = 'none',
  rerunFailuresFilePath,
  setup,
  ui,
  watch = false,
  ...opts
} = {}) {
  if (process.type === 'renderer') {
    assert.equal(isolation, 'none',
      'Test isolation not supported in renderer process')
  }

  switch (isolation) {
    case 'none': {
      // Install the requested test interface (bdd/tdd) globally.
      expose(ui)
      // For testNamePatterns, testSkipPatterns, only
      // See https://github.com/nodejs/node/issues/57399
      assert(!coverage,
        'Test coverage not supported without isolation')
      assert(!watch,
        'Test watch option not supported without isolation')
      break
    }
    case 'process': {
      // Children must force-exit because Electron's loop never drains.
      execArgv = ['--require', FORCE_EXIT, ...execArgv]

      // Forward test filters to children via NODE_OPTIONS, since
      // Electron won't parse them from argv.
      opts.env = {
        ...env,
        NODE_OPTIONS: [
          env.NODE_OPTIONS,
          nodeOptions(opts)
        ].filter(Boolean).join(' ')
      }

      // Children pick up the test interface from $SPARK_UI (see ui.cjs).
      if (ui) {
        execArgv.push('--require', UI)
        opts.env.SPARK_UI = ui
      }
      break
    }
  }

  // Keep per-process rerun state separate for main vs. renderer.
  if (rerunFailuresFilePath)
    rerunFailuresFilePath = `${rerunFailuresFilePath}.${process.type}`

  return test.run({
    ...opts,
    coverage,
    execArgv,
    isolation,
    rerunFailuresFilePath,
    setup (stream) {
      setup?.(stream)
      if (isolation === 'none')
        emitBeforeExit(stream)
    },
    watch
  })
}
|
package/lib/spark.js
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
import { once } from 'node:events'
|
|
2
|
+
import { join } from 'node:path'
|
|
3
|
+
import { PassThrough } from 'node:stream'
|
|
4
|
+
import { app, BrowserWindow } from 'electron'
|
|
5
|
+
import config from './config.js'
|
|
6
|
+
import { startInspector, startDebugger } from './coverage.js'
|
|
7
|
+
import { combine } from './stream.js'
|
|
8
|
+
import { createTestRunner } from './runner.js'
|
|
9
|
+
|
|
10
|
+
// Propagate the sandbox setting of this process to spawned children.
const defaultExecArgv = app.commandLine.hasSwitch('no-sandbox') ? ['--no-sandbox'] : []

// Entry point: run main- and/or renderer-process tests and return a
// single combined object-mode stream of test events.
// help/version/verbose/reporter/destination/globalSetupPath are
// consumed by the CLI layer and intentionally discarded here.
export function run ({ main, renderer, switches, reporter, destination, help, version, verbose, globalSetupPath, ...opts } = {}) {
  // Unrecognized CLI flags were collected as Chromium switches;
  // forward them to child test processes via execArgv.
  if (switches?.length)
    opts.execArgv = [...(opts.execArgv ?? []), ...switches]

  let streams = []

  // Run main-process tests when requested — or by default when no
  // renderer tests were given at all.
  if (main || !renderer)
    streams.push(runMain({ globPatterns: main, ...opts }))

  if (renderer)
    streams.push(runRenderer({ globPatterns: renderer, ...opts }))

  return combine(streams)
}
|
|
26
|
+
|
|
27
|
+
// Run main-process tests. Without coverage the runner's event stream
// is returned directly; with coverage, events are proxied through a
// PassThrough so the coverage report can be appended before the
// stream ends.
export function runMain ({ execArgv = defaultExecArgv, ...opts } = {}) {
  if (!opts.coverage) return createTestRunner({ execArgv, ...opts })

  let out = new PassThrough({ objectMode: true })

  testMain({ out, execArgv, ...opts })
    .then(() => out.end())
    .catch(err => {
      if (!out.destroyed) out.destroy(err)
    })

  return out
}
|
|
40
|
+
|
|
41
|
+
// Drive a main-process test run to completion, forwarding every event
// to `out` and collecting V8 coverage when requested.
// Returns { summary, coverage } (coverage undefined when disabled).
async function testMain ({ out, coverage, ...opts } = {}) {
  // `var` so the binding is hoisted past the conditional; it is only
  // assigned (and later called) when coverage is enabled.
  if (coverage)
    var stopInspector = await startInspector(out)

  let stream = createTestRunner(opts)
  let summary

  for await (let event of stream) {
    out?.write(event)

    if (event.type === 'test:summary')
      summary = event.data
  }

  if (coverage)
    var lcov = await stopInspector()

  return { summary, coverage: lcov }
}
|
|
60
|
+
|
|
61
|
+
// Run renderer-process tests, exposing the IPC-relayed events as an
// object-mode stream; errors from the run destroy the stream.
export function runRenderer (opts = {}) {
  let out = new PassThrough({ objectMode: true })

  testRenderer({ out, ...opts })
    .then(() => out.end())
    .catch(err => {
      if (!out.destroyed) out.destroy(err)
    })

  return out
}
|
|
72
|
+
|
|
73
|
+
// Run renderer tests inside a hidden BrowserWindow. The preload script
// executes the tests and relays events over IPC; this side forwards
// them to `out`, optionally collecting coverage via the debugger.
// Returns { summary, coverage } (coverage undefined when disabled).
async function testRenderer ({
  out,
  onConsole,
  coverage,
  url = config.url,
  ...opts
} = {}) {
  let win = new BrowserWindow({
    show: false,
    webPreferences: {
      // Remaining options reach the preload via process argv.
      additionalArguments: [`--spark=${JSON.stringify(opts)}`],
      preload: join(import.meta.dirname, './preload.cjs'),
      // sandbox must be off so the preload can require node:test.
      sandbox: false,
      backgroundThrottling: false,
      disableDialogs: true,
      spellcheck: false
    }
  })

  try {
    let ac = new AbortController()
    let { signal } = ac
    let summary

    win.webContents
      .on('render-process-gone', (_, { exitCode, reason }) => {
        // Abort the pending once() waits if the renderer crashes.
        ac.abort(new Error(`Renderer process exited: (${exitCode}) ${reason}`))
      })
      .on('console-message', (details) => {
        onConsole?.(details)
      })

    win.webContents.ipc.on('spark:event', (_, event) => {
      out?.write(event)

      if (event.type === 'test:summary')
        summary = event.data
    })

    // NOTE(review): loadURL's promise is not awaited — progress is
    // gated by dom-ready/spark:ready below, but a load *failure*
    // would surface as an unhandled rejection; confirm.
    win.loadURL(url)

    await Promise.all([
      once(win.webContents, 'dom-ready', { signal }),
      once(win.webContents.ipc, 'spark:ready', { signal })
    ])

    // `var` hoisting: assigned/called only when coverage is enabled.
    if (coverage)
      var stopDebugger = await startDebugger(out, win.webContents)

    // Preload starts the actual test run on this signal.
    win.webContents.send('spark:start')
    await once(win.webContents.ipc, 'spark:done', { signal })

    if (coverage)
      var lcov = await stopDebugger()

    return { summary, coverage: lcov }
  } finally {
    if (!win.isDestroyed()) win.close()
  }
}
|
package/lib/stream.js
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import { PassThrough, Readable } from 'node:stream'
|
|
2
|
+
|
|
3
|
+
// Fan a test-event stream out to reporter/destination pairs, while an
// optional tapper transform records selected event data.
//
// source – object-mode Readable of test events
// pairs  – [reporter, destination] tuples; each reporter consumes its
//          own composed copy of the (tapped) source
// tapper – optional factory (see tap()) capturing event data
//
// Resolves with the captured values once every reporter stream has
// ended. Rejects if a reporter stream errors — previously only 'end'
// was observed, so an error left the promise pending forever.
export async function report (source, pairs, tapper) {
  let values = []
  if (tapper)
    source = source.compose(tapper(data => values.push(data)))

  await Promise.all(pairs.map(([reporter, dest]) => {
    let stream = source.compose(reporter)
    stream.pipe(dest)
    return new Promise((resolve, reject) => {
      stream.on('end', resolve)
      stream.on('error', reject)
    })
  }))
  return values
}
|
|
15
|
+
|
|
16
|
+
// Build a transform factory: the returned factory takes a callback and
// yields every event unchanged, invoking the callback with the data of
// each event whose type matches.
export function tap (type) {
  return function (fn) {
    return async function * (source) {
      for await (let event of source) {
        if (event.type === type)
          fn(event.data)
        yield event
      }
    }
  }
}
|
|
24
|
+
|
|
25
|
+
// Collapse several event streams into one; a single source passes
// through untouched, multiple sources are merged.
export function combine (sources) {
  if (sources.length !== 1)
    return Readable.from(merge(sources), { objectMode: true })
  return sources[0]
}
|
|
29
|
+
|
|
30
|
+
// Merge several object-mode test-event streams into one logical run:
// root-level plans, root diagnostics, coverage reports, and summaries
// are aggregated and emitted once at the end; everything else passes
// through as it arrives.
async function * merge (sources) {
  let n = sources.length
  let planCount = 0
  let plansSeen = 0
  let summaries = []
  let diagnostics = {}
  let coverages = []

  // Funnel all sources into one PassThrough: end it when every source
  // has ended, destroy it on the first error.
  let pt = new PassThrough({ objectMode: true })
  let pending = n

  for (let source of sources) {
    source.on('data', (event) => pt.write(event))
    source.on('error', (err) => { if (!pt.destroyed) pt.destroy(err) })
    source.on('end', () => { if (--pending === 0) pt.end() })
  }

  for await (let event of pt) {
    let { type, data } = event

    // Hold back per-source summaries; once all have arrived, flush
    // aggregated diagnostics, buffered coverage, then one summary.
    if (type === 'test:summary') {
      summaries.push(data)
      if (summaries.length === n) {
        for (let [key, val] of Object.entries(diagnostics))
          yield { type: 'test:diagnostic', data: { nesting: 0, message: `${key} ${val}` } }
        for (let c of coverages) yield c
        yield { type: 'test:summary', data: mergeSummaries(summaries) }
      }
      continue
    }

    // Buffer coverage reports until just before the merged summary.
    if (type === 'test:coverage') { coverages.push(event); continue }

    if (data?.nesting === 0) {
      // Sum root-level plan counts; emit one combined plan when every
      // source's root plan has been seen.
      if (type === 'test:plan') {
        planCount += data.count
        if (++plansSeen === n)
          yield { type: 'test:plan', data: { nesting: 0, count: planCount } }
        continue
      }
      // Root diagnostics look like 'pass 3'; sum the numbers per key.
      if (type === 'test:diagnostic') {
        let [key, val] = data.message.split(' ')
        diagnostics[key] = (diagnostics[key] ?? 0) + Number(val)
        continue
      }
    }

    yield event
  }
}
|
|
80
|
+
|
|
81
|
+
// Combine per-source summaries: counts are summed key-by-key, success
// requires every source to have succeeded. Other fields (duration,
// file, etc.) are taken from the first summary.
function mergeSummaries (summaries) {
  let counts = {}
  for (let summary of summaries) {
    for (let key of Object.keys(summary.counts))
      counts[key] = (counts[key] ?? 0) + summary.counts[key]
  }
  return {
    ...summaries[0],
    counts,
    success: summaries.every((summary) => summary.success)
  }
}
|
package/lib/ui.cjs
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
const process = require('node:process')
|
|
2
|
+
const {
|
|
3
|
+
after,
|
|
4
|
+
afterEach,
|
|
5
|
+
before,
|
|
6
|
+
beforeEach,
|
|
7
|
+
describe,
|
|
8
|
+
it,
|
|
9
|
+
suite,
|
|
10
|
+
test
|
|
11
|
+
} = require('node:test')
|
|
12
|
+
|
|
13
|
+
// Install the BDD-style (mocha-like) interface onto `target`
// (the global object by default).
function bdd (target = globalThis) {
  let api = {
    after,
    afterEach,
    before,
    beforeEach,
    describe,
    it
  }
  // Mocha-compatible aliases.
  api.context = api.describe
  api.specify = api.it
  Object.assign(target, api)
}
|
|
25
|
+
|
|
26
|
+
// Install the TDD-style (mocha `tdd`) interface onto `target`.
// In the TDD interface, setup/teardown run around EACH test
// (i.e. beforeEach/afterEach), while suiteSetup/suiteTeardown run once
// per suite. The previous mapping aliased setup/teardown to
// before/after, turning per-test hooks into once-per-suite hooks.
function tdd (target = globalThis) {
  Object.assign(target, {
    suite,
    suiteSetup: before,
    suiteTeardown: after,
    setup: beforeEach,
    teardown: afterEach,
    test
  })
}
|
|
36
|
+
|
|
37
|
+
// Install the named test interface on `target`; unknown or missing
// names are a silent no-op.
function expose (name, target = globalThis) {
  if (name === 'bdd') return bdd(target)
  if (name === 'tdd') return tdd(target)
}
|
|
43
|
+
|
|
44
|
+
module.exports = {
  expose,
  bdd,
  tdd
}

// Auto-assign as side-effect!
// When preloaded via --require into a child test process, install the
// interface named by $SPARK_UI (set by runner.js) onto globalThis.
if (process.env.SPARK_UI)
  expose(process.env.SPARK_UI)
|
package/package.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "spark-chamber",
|
|
3
|
+
"productName": "spark",
|
|
4
|
+
"version": "0.1.0-alpha.0",
|
|
5
|
+
"description": "The Electron test runner",
|
|
6
|
+
"main": "lib/main.js",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": "./lib/spark.js",
|
|
9
|
+
"./config": "./lib/config.js",
|
|
10
|
+
"./init": "./lib/init.js"
|
|
11
|
+
},
|
|
12
|
+
"files": [
|
|
13
|
+
"bin",
|
|
14
|
+
"lib"
|
|
15
|
+
],
|
|
16
|
+
"type": "module",
|
|
17
|
+
"bin": {
|
|
18
|
+
"spark": "./bin/spark.cjs"
|
|
19
|
+
},
|
|
20
|
+
"scripts": {
|
|
21
|
+
"lint": "eslint eslint.config.js bin lib test",
|
|
22
|
+
"prepublishOnly": "npm run lint && npm test",
|
|
23
|
+
"test": "npm run test:spark && npm run test:node",
|
|
24
|
+
"test:spark": "electron --no-sandbox . test/main/**/*.test.js -r test/renderer/**/*.test.js",
|
|
25
|
+
"test:node": "node --test test/cli.test.js"
|
|
26
|
+
},
|
|
27
|
+
"repository": {
|
|
28
|
+
"type": "git",
|
|
29
|
+
"url": "git+https://github.com/inukshuk/spark.git"
|
|
30
|
+
},
|
|
31
|
+
"keywords": [
|
|
32
|
+
"electron",
|
|
33
|
+
"test",
|
|
34
|
+
"testing",
|
|
35
|
+
"coverage"
|
|
36
|
+
],
|
|
37
|
+
"author": "Sylvester Keil <sylvester@keil.or.at>",
|
|
38
|
+
"license": "MIT",
|
|
39
|
+
"bugs": {
|
|
40
|
+
"url": "https://github.com/inukshuk/spark/issues"
|
|
41
|
+
},
|
|
42
|
+
"homepage": "https://github.com/inukshuk/spark#readme",
|
|
43
|
+
"engines": {
|
|
44
|
+
"node": ">=24"
|
|
45
|
+
},
|
|
46
|
+
"peerDependencies": {
|
|
47
|
+
"electron": ">=40"
|
|
48
|
+
},
|
|
49
|
+
"peerDependenciesMeta": {
|
|
50
|
+
"electron": {
|
|
51
|
+
"optional": true
|
|
52
|
+
}
|
|
53
|
+
},
|
|
54
|
+
"devDependencies": {
|
|
55
|
+
"electron": "^41.0.2",
|
|
56
|
+
"eslint": "^9.5.0",
|
|
57
|
+
"neostandard": "^0.13.0"
|
|
58
|
+
}
|
|
59
|
+
}
|