demowright 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,529 @@
1
+ import { n as __exportAll, t as __esmMin } from "./chunk-C0p4GxOx.mjs";
2
+ import { a as getTtsProvider, d as storeRenderJob, n as getGlobalOutputDir, o as init_hud_registry, r as getGlobalTtsProvider, s as isHudActive, u as storeAudioSegment } from "./hud-registry-CptkyV32.mjs";
3
+ import { mkdirSync, writeFileSync } from "node:fs";
4
+ import { join } from "node:path";
5
+ //#region src/video-script.ts
6
+ /**
7
+ * Video script — narration-driven video production for demowright.
8
+ *
9
+ * Extends the narration plan pattern with title cards, transitions,
10
+ * auto-generated SRT subtitles, chapter markers, and direct MP4 render.
11
+ *
12
+ * ```ts
13
+ * const script = createVideoScript()
14
+ * .title("My Product Tour")
15
+ * .segment("Welcome to the dashboard", async (pace) => {
16
+ * await page.goto("/dashboard");
17
+ * await pace();
18
+ * })
19
+ * .transition("fade", 500)
20
+ * .segment("Let's fill the form", async (pace) => {
21
+ * await clickEl(page, "#name");
22
+ * await pace();
23
+ * await typeKeys(page, "Alice");
24
+ * })
25
+ * .outro({ text: "Thanks for watching!" })
26
+ *
27
+ * const result = await script.render(page);
28
+ * // result.mp4Path, result.srtPath, result.timeline
29
+ * ```
30
+ */
31
// Lazy export map for this chunk: getters defer evaluation until first access,
// so hoisted function declarations below resolve correctly at import time.
var video_script_exports = /* @__PURE__ */ __exportAll({
	buildFfmpegCommand: () => buildFfmpegCommand,
	createVideoScript: () => createVideoScript
});
35
/**
 * Resolve narration text to an audio buffer via a TTS provider.
 *
 * @param text - Narration text to synthesize.
 * @param provider - Either a URL template containing `%s` (replaced with the
 *   URL-encoded text and fetched over HTTP) or an async function that returns
 *   a Buffer / buffer-like value.
 * @returns Buffer with the provider's audio payload.
 * @throws Error `TTS fetch <status>` when the HTTP provider responds non-2xx.
 */
async function fetchTtsAudio(text, provider) {
	if (typeof provider !== "string") {
		// Callable provider: normalize whatever it returns into a Buffer.
		const produced = await provider(text);
		return Buffer.isBuffer(produced) ? produced : Buffer.from(produced);
	}
	// URL-template provider: substitute the encoded text and fetch.
	const endpoint = provider.replace(/%s/g, encodeURIComponent(text));
	const response = await fetch(endpoint);
	if (!response.ok) throw new Error(`TTS fetch ${response.status}`);
	return Buffer.from(await response.arrayBuffer());
}
45
/**
 * Compute the playback duration (ms) of a RIFF/WAVE buffer.
 *
 * Reads the canonical PCM header layout: channel count at byte 22, sample
 * rate at byte 24, bits-per-sample at byte 34, and locates the "data" chunk
 * tag to find where PCM samples begin.
 *
 * @param wavBuf - Buffer containing a WAV file.
 * @returns Duration in milliseconds, or 0 when the buffer has no "data"
 *   chunk, a truncated header, or degenerate fmt fields.
 */
function parseWavDuration(wavBuf) {
	// "data" tag (4 bytes) + chunk-size field (4 bytes) precede the samples.
	const dataOffset = wavBuf.indexOf("data") + 8;
	// indexOf returns -1 when the tag is missing; also reject headers too
	// short to contain the fmt fields read below (offsets 22/24/34).
	if (dataOffset < 8 || wavBuf.length < 36) return 0;
	const sampleRate = wavBuf.readUInt32LE(24);
	const channels = wavBuf.readUInt16LE(22);
	// Use the declared bits-per-sample instead of assuming 16-bit; for
	// 16-bit WAVs (bytesPerSample = 2) the result is unchanged.
	const bytesPerSample = Math.max(1, wavBuf.readUInt16LE(34) / 8);
	// Avoid Infinity/NaN on corrupt headers.
	if (sampleRate === 0 || channels === 0) return 0;
	return wavBuf.subarray(dataOffset).length / bytesPerSample / channels / sampleRate * 1e3;
}
52
/**
 * Decode a 16-bit PCM RIFF/WAVE buffer into normalized float samples.
 *
 * @param wavBuf - Buffer containing a WAV file with 16-bit PCM data.
 * @returns `{ float32, sampleRate, channels, sampleCount, durationMs }` where
 *   `float32` holds interleaved samples scaled to [-1, 1) and `sampleCount`
 *   counts individual (per-channel) samples.
 * @throws {Error} "Invalid WAV" when no "data" chunk tag is present.
 */
function parseWav(wavBuf) {
	// PCM bytes start 8 bytes after the "data" tag (tag + size field).
	const dataOffset = wavBuf.indexOf("data") + 8;
	if (dataOffset < 8) throw new Error("Invalid WAV");
	const channels = wavBuf.readUInt16LE(22);
	const sampleRate = wavBuf.readUInt32LE(24);
	const pcm = wavBuf.subarray(dataOffset);
	const sampleCount = pcm.length / 2;
	const samples = new Float32Array(sampleCount);
	let i = 0;
	while (i < sampleCount) {
		// Scale signed 16-bit PCM into [-1, 1).
		samples[i] = pcm.readInt16LE(i * 2) / 32768;
		i += 1;
	}
	return {
		float32: samples,
		sampleRate,
		channels,
		sampleCount,
		durationMs: sampleCount / channels / sampleRate * 1e3
	};
}
69
/**
 * Display a full-screen title/outro card overlay inside the page.
 *
 * Injects a fixed-position flex container with a heading, an optional
 * subtitle line, and a fade in/out keyframe animation; the overlay removes
 * itself after `durationMs`. Styles are injected once per page via a
 * `#qa-vs-card-style` element.
 *
 * @param page - Playwright page to evaluate in.
 * @param text - Heading text.
 * @param subtitle - Optional secondary line (skipped when falsy).
 * @param durationMs - Lifetime of the card in milliseconds.
 * @param background - CSS background value for the full-screen layer.
 */
async function showCard(page, text, subtitle, durationMs, background) {
	await page.evaluate(([heading, subline, lifetimeMs, bg]) => {
		// One-time keyframe definition shared by every card on this page.
		if (!document.querySelector("#qa-vs-card-style")) {
			const styleEl = document.createElement("style");
			styleEl.id = "qa-vs-card-style";
			styleEl.textContent = `
@keyframes qa-vs-card-anim {
	0% { opacity: 0; }
	10% { opacity: 1; }
	85% { opacity: 1; }
	100% { opacity: 0; }
}
`;
			document.head.appendChild(styleEl);
		}
		const card = document.createElement("div");
		card.id = "qa-vs-card";
		card.style.cssText = `position:fixed;inset:0;background:${bg};display:flex;flex-direction:column;align-items:center;justify-content:center;z-index:2147483646;pointer-events:none;animation:qa-vs-card-anim ${lifetimeMs}ms ease-in-out forwards`;
		const headingEl = document.createElement("div");
		headingEl.textContent = heading;
		headingEl.style.cssText = "font-family:'Segoe UI',system-ui,sans-serif;font-size:48px;font-weight:800;color:#fff;text-align:center;max-width:80vw;line-height:1.2;text-shadow:0 2px 20px rgba(0,0,0,0.5);";
		card.appendChild(headingEl);
		if (subline) {
			const sublineEl = document.createElement("div");
			sublineEl.textContent = subline;
			sublineEl.style.cssText = "font-family:'Segoe UI',system-ui,sans-serif;font-size:22px;font-weight:400;color:rgba(255,255,255,0.7);margin-top:16px;text-align:center;max-width:70vw;";
			card.appendChild(sublineEl);
		}
		document.body.appendChild(card);
		// Self-cleanup once the animation has finished.
		setTimeout(() => card.remove(), lifetimeMs);
	}, [
		text,
		subtitle,
		durationMs,
		background
	]);
}
117
/**
 * Display a bottom-centered caption pill inside the page for `durationMs`.
 *
 * The caption fades in, holds, then fades out via an injected keyframe
 * animation, and removes itself when its lifetime elapses. The keyframe
 * style element (`#qa-sub-style`) is injected once per page.
 *
 * @param page - Playwright page to evaluate in.
 * @param text - Caption text.
 * @param durationMs - Lifetime of the caption in milliseconds.
 */
async function showCaption(page, text, durationMs) {
	await page.evaluate(([caption, lifetimeMs]) => {
		// One-time keyframe definition shared by every caption on this page.
		if (!document.querySelector("#qa-sub-style")) {
			const styleEl = document.createElement("style");
			styleEl.id = "qa-sub-style";
			styleEl.textContent = `
@keyframes qa-sub-fade {
	0% { opacity: 0; transform: translateX(-50%) translateY(10px); }
	8% { opacity: 1; transform: translateX(-50%) translateY(0); }
	85% { opacity: 1; transform: translateX(-50%) translateY(0); }
	100% { opacity: 0; transform: translateX(-50%) translateY(-10px); }
}
`;
			document.head.appendChild(styleEl);
		}
		const pill = document.createElement("div");
		pill.textContent = caption;
		pill.style.cssText = `position:fixed;bottom:60px;left:50%;transform:translateX(-50%);background:rgba(0,0,0,0.8);color:#fff;font-family:system-ui,sans-serif;font-size:18px;padding:10px 24px;border-radius:8px;z-index:2147483647;pointer-events:none;white-space:nowrap;max-width:90vw;text-align:center;animation:qa-sub-fade ${lifetimeMs}ms ease-out forwards`;
		document.body.appendChild(pill);
		// Self-cleanup once the animation has finished.
		setTimeout(() => pill.remove(), lifetimeMs);
	}, [text, durationMs]);
}
156
/**
 * Format a millisecond offset as an SRT timestamp: `HH:MM:SS,mmm`.
 *
 * @param ms - Non-negative offset in milliseconds.
 * @returns Zero-padded SRT timestamp string.
 */
function formatSrtTimestamp(ms) {
	const pad2 = (v) => String(v).padStart(2, "0");
	const totalSec = Math.floor(ms / 1e3);
	const hours = Math.floor(totalSec / 3600);
	const minutes = Math.floor((totalSec % 3600) / 60);
	const seconds = totalSec % 60;
	const millis = String(Math.floor(ms % 1e3)).padStart(3, "0");
	return `${pad2(hours)}:${pad2(minutes)}:${pad2(seconds)},${millis}`;
}
164
/**
 * Build SRT subtitle content from a script timeline.
 *
 * Only `segment`, `title`, and `outro` entries produce cues (transitions are
 * skipped); cues are numbered from 1 in timeline order.
 *
 * @param timeline - Timeline entries with `kind`, `text`, `startMs`, `durationMs`.
 * @returns Full SRT document as a string.
 */
function generateSrt(timeline) {
	const cues = [];
	let cueNumber = 0;
	for (const entry of timeline) {
		if (entry.kind !== "segment" && entry.kind !== "title" && entry.kind !== "outro") continue;
		cueNumber += 1;
		const start = formatSrtTimestamp(entry.startMs);
		const end = formatSrtTimestamp(entry.startMs + entry.durationMs);
		cues.push(`${cueNumber}\n${start} --> ${end}\n${entry.text}\n`);
	}
	return cues.join("\n");
}
171
/**
 * Build ffmpeg metadata `[CHAPTER]` blocks from a script timeline.
 *
 * Only `segment`, `title`, and `outro` entries become chapters (transitions
 * are skipped). Times use a 1/1000 timebase; titles are truncated to 80 chars.
 *
 * @param timeline - Timeline entries with `kind`, `text`, `startMs`, `durationMs`.
 * @returns ffmpeg metadata chapter content ("" for an empty timeline).
 */
function generateChapters(timeline) {
	// Fix: the original computed `(entry.startMs / 1e3).toFixed(3)` and the
	// matching end value and discarded both — dead expressions removed.
	return timeline.filter((e) => e.kind === "segment" || e.kind === "title" || e.kind === "outro").map((entry) => {
		return `[CHAPTER]\nTIMEBASE=1/1000\nSTART=${Math.round(entry.startMs)}\nEND=${Math.round(entry.startMs + entry.durationMs)}\ntitle=${entry.text.slice(0, 80)}`;
	}).join("\n\n");
}
178
/**
 * Build a complete ffmpeg command that:
 * 1. Muxes video + audio
 * 2. Applies fade transitions at the correct offsets
 * 3. Burns in SRT subtitles
 * 4. Embeds chapter metadata
 *
 * @param videoPath - Source video file (e.g. Playwright's webm recording).
 * @param wavPath - Mixed narration WAV track.
 * @param srtPath - Subtitle file burned in via the `subtitles` filter.
 * @param chaptersPath - ffmpeg metadata file mapped with `-map_metadata`.
 * @param mp4Path - Output MP4 path.
 * @param timeline - Timeline entries; only `transition` entries add fades.
 * @returns Multi-line shell command string (lines joined with ` \` continuations).
 */
function buildFfmpegCommand(videoPath, wavPath, srtPath, chaptersPath, mp4Path, timeline) {
	const filters = [];
	for (const entry of timeline) {
		if (entry.kind !== "transition") continue;
		const startSec = (entry.startMs / 1e3).toFixed(3);
		const durSec = (entry.durationMs / 1e3).toFixed(3);
		const endSec = ((entry.startMs + entry.durationMs) / 1e3).toFixed(3);
		// Fade out over the transition, then fade back in where it ends.
		filters.push(`fade=t=out:st=${startSec}:d=${durSec}`, `fade=t=in:st=${endSec}:d=${durSec}`);
	}
	// Escape the subtitle path for use inside a single-quoted filter argument.
	const escapedSrt = srtPath.replace(/\\/g, "/").replace(/:/g, "\\\\:").replace(/'/g, "'\\''");
	filters.push(`subtitles='${escapedSrt}'`);
	const args = [
		"ffmpeg -y",
		`-i "${videoPath}"`,
		`-i "${wavPath}"`,
		`-i "${chaptersPath}"`,
		"-map_metadata 2",
		`-vf "${filters.join(",")}"`,
		"-c:v libx264 -preset fast",
		"-c:a aac",
		"-shortest",
		`"${mp4Path}"`
	];
	return args.join(" \\\n ");
}
211
/**
 * Mix prepared narration segments into one stereo 16-bit PCM WAV buffer
 * spanning `totalMs` milliseconds.
 *
 * Mono segments are duplicated onto both channels; overlapping segments are
 * summed and hard-clipped to [-1, 1] before quantization. The sample rate of
 * the first segment is used for the whole track.
 *
 * @param segments - `{ offsetMs, wavBuf }` entries positioned on the track.
 * @param totalMs - Total track length in milliseconds.
 * @returns Complete WAV file buffer (44-byte header + PCM), or null when
 *   there are no segments.
 */
function buildWavTrack(segments, totalMs) {
	if (segments.length === 0) return null;
	const { sampleRate } = parseWav(segments[0].wavBuf);
	const channels = 2;
	const totalSamples = Math.ceil(totalMs / 1e3 * sampleRate * channels);
	const mix = new Float32Array(totalSamples);
	for (const seg of segments) {
		const parsed = parseWav(seg.wavBuf);
		// Frame-aligned start position for this segment (interleaved samples).
		const base = Math.floor(seg.offsetMs / 1e3 * sampleRate) * channels;
		let stereo;
		if (parsed.channels === 1) {
			// Duplicate mono samples onto left and right channels.
			stereo = new Float32Array(parsed.sampleCount * 2);
			for (let i = 0; i < parsed.sampleCount; i++) {
				stereo[i * 2] = parsed.float32[i];
				stereo[i * 2 + 1] = parsed.float32[i];
			}
		} else {
			stereo = parsed.float32;
		}
		// Sum into the track, dropping samples past the end.
		const limit = Math.min(stereo.length, mix.length - base);
		for (let i = 0; i < limit; i++) mix[base + i] += stereo[i];
	}
	// Clip and quantize to signed 16-bit PCM.
	const int16 = new Int16Array(mix.length);
	for (let i = 0; i < mix.length; i++) {
		const s = Math.max(-1, Math.min(1, mix[i]));
		int16[i] = s < 0 ? s * 32768 : s * 32767;
	}
	// Emit the canonical 44-byte RIFF/WAVE PCM header followed by the samples.
	const dataBytes = int16.length * 2;
	const out = Buffer.alloc(44 + dataBytes);
	out.write("RIFF", 0);
	out.writeUInt32LE(36 + dataBytes, 4);
	out.write("WAVE", 8);
	out.write("fmt ", 12);
	out.writeUInt32LE(16, 16); // fmt chunk size
	out.writeUInt16LE(1, 20); // audio format: PCM
	out.writeUInt16LE(channels, 22);
	out.writeUInt32LE(sampleRate, 24);
	out.writeUInt32LE(sampleRate * channels * 2, 28); // byte rate
	out.writeUInt16LE(channels * 2, 32); // block align
	out.writeUInt16LE(16, 34); // bits per sample
	out.write("data", 36);
	out.writeUInt32LE(dataBytes, 40);
	Buffer.from(int16.buffer).copy(out, 44);
	return out;
}
254
/**
 * Create a video script — narration-driven video production with title cards,
 * transitions, auto-subtitles, and chapter markers.
 *
 * @returns a fresh, empty VideoScriptImpl builder. Requires the module's
 *   init_video_script() initializer to have run so VideoScriptImpl is defined
 *   (the bundle calls it at import time).
 */
function createVideoScript() {
	return new VideoScriptImpl();
}
261
// Bindings are assigned inside the lazy initializer below (bundler pattern:
// __esmMin runs the closure once, on first use or at module init).
var DEFAULT_BG, VideoScriptImpl;
// One-shot module initializer: wires the HUD registry, then defines the
// default card background and the VideoScriptImpl builder class.
var init_video_script = __esmMin((() => {
	init_hud_registry();
	// Default full-screen background for title/outro cards.
	DEFAULT_BG = "radial-gradient(ellipse at 50% 40%, #1a1a3e 0%, #0b0b1a 100%)";
	VideoScriptImpl = class VideoScriptImpl {
		// Ordered step descriptors (title / segment / transition / outro).
		steps = [];
		// NOTE(review): `counter` is never read or written in this class —
		// appears unused; confirm before removing.
		counter = 0;
		// Per-instance TTS audio keyed by segment id ("step-<n>"), filled by prepare().
		prepared = /* @__PURE__ */ new Map();
		// Cross-instance TTS cache keyed by narration text, so repeated runs
		// of the same script skip re-synthesis.
		static ttsCache = /* @__PURE__ */ new Map();
		/**
		 * Add a title card — full-screen overlay with text + optional subtitle.
		 * No TTS narration by default (silent title card).
		 */
		title(text, opts) {
			this.steps.push({
				kind: "title",
				text,
				subtitle: opts?.subtitle,
				durationMs: opts?.durationMs ?? 4e3,
				background: opts?.background
			});
			return this;
		}
		/**
		 * Add a narrated segment — TTS audio drives timing, callback runs paced actions.
		 * Same as NarrationPlan's `.annotate()` but named `.segment()` for clarity.
		 */
		segment(text, action) {
			this.steps.push({
				kind: "segment",
				text,
				action
			});
			return this;
		}
		/**
		 * Add a transition between segments.
		 * Applied as an ffmpeg filter during render.
		 */
		transition(type = "fade", durationMs = 500) {
			this.steps.push({
				kind: "transition",
				type,
				durationMs
			});
			return this;
		}
		/**
		 * Add an outro card — full-screen overlay, similar to title.
		 */
		outro(opts) {
			this.steps.push({
				kind: "outro",
				text: opts?.text ?? "Thanks for watching!",
				subtitle: opts?.subtitle,
				durationMs: opts?.durationMs ?? 4e3,
				background: opts?.background
			});
			return this;
		}
		/**
		 * Pre-generate all TTS audio for segment steps.
		 * Call before page.goto() so video recording doesn't include TTS wait time.
		 *
		 * Accepts a page (provider looked up from the registry), a provider
		 * directly (function or URL template string), or nothing (global
		 * provider). Silently no-ops when no provider is resolved.
		 */
		async prepare(pageOrProvider) {
			let provider;
			if (!pageOrProvider) provider = getGlobalTtsProvider();
			else if (typeof pageOrProvider === "function" || typeof pageOrProvider === "string") provider = pageOrProvider;
			else provider = getTtsProvider(pageOrProvider);
			if (!provider) return;
			// Segment ids here ("step-<i>") must match the ids executeSteps()
			// derives, since prepared audio is looked up by that id.
			const segmentSteps = this.steps.filter((s) => s.kind === "segment").map((s, i) => ({
				...s,
				id: `step-${i}`
			}));
			// allSettled: a failed TTS fetch skips that segment (it falls back
			// to the default duration at run time) instead of aborting prepare.
			const results = await Promise.allSettled(segmentSteps.map(async (step) => {
				const cached = VideoScriptImpl.ttsCache.get(step.text);
				if (cached) return {
					id: step.id,
					...cached
				};
				const wavBuf = await fetchTtsAudio(step.text, provider);
				const durationMs = parseWavDuration(wavBuf);
				return {
					id: step.id,
					wavBuf,
					durationMs
				};
			}));
			// allSettled preserves input order, so `idx` tracks the matching
			// entry in segmentSteps for cache population.
			let idx = 0;
			for (const r of results) {
				if (r.status === "fulfilled") {
					const seg = {
						wavBuf: r.value.wavBuf,
						durationMs: r.value.durationMs
					};
					this.prepared.set(r.value.id, seg);
					const step = segmentSteps[idx];
					if (step) VideoScriptImpl.ttsCache.set(step.text, seg);
				}
				idx++;
			}
		}
		/**
		 * Execute the script steps — run each step timed to its narration duration.
		 * Returns timeline + totalMs. Audio segments are stored in the registry
		 * for deferred track building at context close (setup.ts).
		 */
		async run(page) {
			// NOTE(review): audioSegments is destructured but unused here —
			// audio is persisted via storeAudioSegment inside executeSteps.
			const { timeline, totalMs, audioSegments } = await this.executeSteps(page);
			return {
				timeline,
				totalMs,
				srtContent: generateSrt(timeline),
				chaptersContent: generateChapters(timeline)
			};
		}
		/**
		 * Execute the script and produce final output files:
		 * WAV audio track, SRT subtitles, chapter metadata.
		 * Audio mux with video happens at context close (setup.ts).
		 */
		async render(page, opts) {
			const { timeline, totalMs, audioSegments } = await this.executeSteps(page);
			const srtContent = generateSrt(timeline);
			const chaptersContent = generateChapters(timeline);
			const outputDir = opts?.outputDir ?? join(process.cwd(), getGlobalOutputDir());
			const tmpDir = join(outputDir, "tmp");
			const baseName = opts?.baseName ?? `demowright-video-${Date.now()}`;
			mkdirSync(outputDir, { recursive: true });
			mkdirSync(tmpDir, { recursive: true });
			const wavPath = join(tmpDir, `${baseName}.wav`);
			const srtPath = join(tmpDir, `${baseName}.srt`);
			const chaptersPath = join(tmpDir, `${baseName}-chapters.txt`);
			const mp4Path = join(outputDir, `${baseName}.mp4`);
			writeFileSync(srtPath, srtContent);
			if (chaptersContent) writeFileSync(chaptersPath, chaptersContent);
			const wavBuf = buildWavTrack(audioSegments, totalMs);
			if (wavBuf) writeFileSync(wavPath, wavBuf);
			if (wavBuf) {
				const job = {
					wavPath,
					srtPath,
					chaptersPath,
					mp4Path,
					timeline: timeline.map((e) => ({
						kind: e.kind,
						// Transition entries carry their type as a "[fade]"-style
						// label in `text`; strip the brackets to recover it.
						type: e.kind === "transition" ? e.text.replace(/^\[|\]$/g, "") : void 0,
						startMs: e.startMs,
						durationMs: e.durationMs
					})),
					totalMs
				};
				storeRenderJob(page, job);
				// The real video path isn't known until the context closes, so
				// the printed command uses a "<video.webm>" placeholder.
				const cmd = buildFfmpegCommand("<video.webm>", wavPath, srtPath, chaptersPath, mp4Path, job.timeline);
				console.log(`[demowright] Render files ready. After test completes, run:\n${cmd}`);
				return {
					timeline,
					totalMs,
					srtContent,
					srtPath,
					chaptersContent,
					// MP4 is produced later (context close / manual ffmpeg run).
					mp4Path: void 0,
					wavPath,
					ffmpegCommand: cmd
				};
			}
			// No audio was captured: only subtitle/chapter artifacts exist.
			return {
				timeline,
				totalMs,
				srtContent,
				srtPath,
				chaptersContent
			};
		}
		// Core step loop shared by run() and render(): walks this.steps in
		// order, drives overlays/waits on the page, and records a timeline
		// entry per step using wall-clock (Date.now()) offsets.
		async executeSteps(page) {
			const active = await isHudActive(page);
			const provider = getTtsProvider(page);
			// Lazily prepare TTS audio if the caller didn't do it up front.
			if (this.prepared.size === 0 && active && provider) await this.prepare(page);
			let segIdx = 0;
			// Segment ids count only segments ("step-0", "step-1", …) to match
			// prepare(); other kinds embed their kind plus position.
			// NOTE(review): indexOf returns the first matching reference, so
			// ids are only distinct while step objects are not shared.
			const stepIds = this.steps.map((s) => {
				if (s.kind === "segment") return `step-${segIdx++}`;
				return `step-${s.kind}-${this.steps.indexOf(s)}`;
			});
			const timeline = [];
			const audioSegments = [];
			const planStartMs = Date.now();
			let segmentIndex = 0;
			for (let i = 0; i < this.steps.length; i++) {
				const step = this.steps[i];
				const stepStartMs = Date.now();
				if (step.kind === "title" || step.kind === "outro") {
					const bg = step.background ?? DEFAULT_BG;
					// Fire-and-forget overlay; errors (e.g. page closed) are ignored.
					if (active) showCard(page, step.text, step.subtitle, step.durationMs, bg).catch(() => {});
					await page.waitForTimeout(step.durationMs);
					timeline.push({
						id: stepIds[i],
						kind: step.kind,
						text: step.text,
						startMs: stepStartMs - planStartMs,
						durationMs: step.durationMs,
						actionMs: 0,
						overrunMs: 0
					});
				} else if (step.kind === "transition") {
					// Transitions only consume time when the HUD is active; the
					// visual fade itself is applied later by ffmpeg.
					if (active) await page.waitForTimeout(step.durationMs);
					timeline.push({
						id: stepIds[i],
						kind: "transition",
						text: `[${step.type}]`,
						startMs: stepStartMs - planStartMs,
						durationMs: step.durationMs,
						actionMs: 0,
						overrunMs: 0
					});
				} else if (step.kind === "segment") {
					const segId = `step-${segmentIndex}`;
					segmentIndex++;
					const segment = this.prepared.get(segId);
					// Without prepared TTS audio, fall back to a 3s segment.
					const targetMs = segment?.durationMs ?? 3e3;
					if (active) showCaption(page, step.text, targetMs).catch(() => {});
					if (active && segment) {
						const offsetMs = Date.now() - planStartMs;
						audioSegments.push({
							offsetMs,
							wavBuf: segment.wavBuf
						});
						// Also persist to the registry for the deferred mux at
						// context close.
						storeAudioSegment(page, {
							timestampMs: Date.now(),
							wavBuf: segment.wavBuf
						});
					}
					let paceCallCount = 0;
					// Assumed number of pace() calls per segment, used to split
					// the remaining narration time into roughly even waits.
					const paceEstimate = 8;
					const pace = async () => {
						paceCallCount++;
						const remaining = targetMs - (Date.now() - stepStartMs);
						const remainingPaces = Math.max(1, paceEstimate - paceCallCount);
						const delay = Math.max(50, remaining / remainingPaces);
						// Skip tiny waits (<50ms) and never sleep past the target.
						if (remaining > 50 && active) await page.waitForTimeout(Math.min(delay, remaining));
					};
					const actionStartMs = Date.now();
					await step.action?.(pace);
					const actionMs = Date.now() - actionStartMs;
					const remaining = targetMs - (Date.now() - stepStartMs);
					// overrunMs > 0 means the action ran longer than the narration.
					const overrunMs = Math.max(0, -remaining);
					// Pad out the rest of the narration duration.
					if (remaining > 50 && active) await page.waitForTimeout(remaining);
					const actualDuration = Date.now() - stepStartMs;
					timeline.push({
						id: segId,
						kind: "segment",
						text: step.text,
						startMs: stepStartMs - planStartMs,
						durationMs: actualDuration,
						actionMs,
						overrunMs
					});
				}
			}
			return {
				timeline,
				totalMs: Date.now() - planStartMs,
				audioSegments
			};
		}
	};
}));
527
+ //#endregion
528
// Eagerly initialize so DEFAULT_BG and VideoScriptImpl are assigned at import
// time; the aliased `n`/`t` exports are consumed by sibling bundle chunks.
init_video_script();
export { buildFfmpegCommand, createVideoScript, video_script_exports as n, init_video_script as t };
Binary file
Binary file
Binary file
package/package.json ADDED
@@ -0,0 +1,61 @@
1
+ {
2
+ "name": "demowright",
3
+ "version": "0.1.0",
4
+ "description": "Playwright video production plugin — cursor overlay, keystroke badges, TTS narration, and narration-driven video scripts for test recordings",
5
+ "license": "MIT",
6
+ "repository": {
7
+ "type": "git",
8
+ "url": "https://github.com/snomiao/demowright.git"
9
+ },
10
+ "files": [
11
+ "dist",
12
+ "register.cjs",
13
+ "media"
14
+ ],
15
+ "type": "module",
16
+ "main": "./dist/index.mjs",
17
+ "types": "./dist/index.d.mts",
18
+ "exports": {
19
+ ".": {
20
+ "import": "./dist/index.mjs",
21
+ "types": "./dist/index.d.mts"
22
+ },
23
+ "./register": "./register.cjs",
24
+ "./config": {
25
+ "import": "./dist/config.mjs",
26
+ "types": "./dist/config.d.mts"
27
+ },
28
+ "./helpers": {
29
+ "import": "./dist/helpers.mjs",
30
+ "types": "./dist/helpers.d.mts"
31
+ },
32
+ "./auto-annotate": {
33
+ "import": "./dist/auto-annotate.mjs",
34
+ "types": "./dist/auto-annotate.d.mts"
35
+ }
36
+ },
37
+ "publishConfig": {
38
+ "access": "public"
39
+ },
40
+ "scripts": {
41
+ "build": "tsdown",
42
+ "test": "bunx playwright test",
43
+ "demo": "bunx playwright test tests/demo.spec.ts --headed",
44
+ "prepublishOnly": "tsdown"
45
+ },
46
+ "devDependencies": {
47
+ "@playwright/test": "^1.52.0",
48
+ "@types/node": "^25.5.2",
49
+ "@typescript/native-preview": "^7.0.0-dev.20260405.1",
50
+ "semantic-release": "^25.0.3",
51
+ "tsdown": "^0.21.7"
52
+ },
53
+ "peerDependencies": {
54
+ "@playwright/test": ">=1.40.0"
55
+ },
56
+ "release": {
57
+ "branches": [
58
+ "main"
59
+ ]
60
+ }
61
+ }