demowright 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +189 -0
- package/dist/auto-annotate.d.mts +6 -0
- package/dist/auto-annotate.mjs +9 -0
- package/dist/chunk-C0p4GxOx.mjs +29 -0
- package/dist/config.d.mts +7 -0
- package/dist/config.mjs +36 -0
- package/dist/helpers.d.mts +95 -0
- package/dist/helpers.mjs +299 -0
- package/dist/hud-registry-CptkyV32.mjs +90 -0
- package/dist/index.d.mts +54044 -0
- package/dist/index.mjs +29 -0
- package/dist/setup-CfPylVYx.mjs +652 -0
- package/dist/setup-D-Ut4FYK.d.mts +63 -0
- package/dist/setup.d.mts +2 -0
- package/dist/setup.mjs +2 -0
- package/dist/video-script.d.mts +134 -0
- package/dist/video-script.mjs +529 -0
- package/media/01-dashboard.jpg +0 -0
- package/media/02-code-editor.jpg +0 -0
- package/media/03-checkout.jpg +0 -0
- package/package.json +61 -0
- package/register.cjs +145 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { r as AudioWriter, t as applyHud } from "./setup-CfPylVYx.mjs";
|
|
2
|
+
import { o as init_hud_registry, r as getGlobalTtsProvider } from "./hud-registry-CptkyV32.mjs";
|
|
3
|
+
import { buildFfmpegCommand, createVideoScript, t as init_video_script } from "./video-script.mjs";
|
|
4
|
+
import { annotate, caption, clickEl, hudWait, moveTo, moveToEl, narrate, subtitle, typeKeys } from "./helpers.mjs";
|
|
5
|
+
import { installAutoAnnotate } from "./auto-annotate.mjs";
|
|
6
|
+
import { expect, test as test$1 } from "@playwright/test";
|
|
7
|
+
//#region src/fixture.ts
|
|
8
|
+
/**
|
|
9
|
+
* Approach 1: Fixture-based integration (import replacement).
|
|
10
|
+
*
|
|
11
|
+
* import { test, expect } from 'demowright';
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Playwright `test` object extended with the demowright HUD fixtures.
 * Drop-in replacement: `import { test, expect } from 'demowright';`
 */
const test = test$1.extend({
  // Option fixture: per-project HUD settings, overridable via test.use({ qaHud: {...} }).
  qaHud: [{}, { option: true }],
  // Install HUD init scripts on the browser context before pages are used.
  context: async ({ context, qaHud }, use) => {
    await applyHud(context, qaHud);
    await use(context);
  },
  // Optionally show the test title as an on-screen annotation before the test body runs.
  page: async ({ page, qaHud }, use, testInfo) => {
    if (qaHud.autoAnnotate) {
      const { titlePath } = testInfo;
      const label = titlePath.length > 1 ? titlePath.join(" › ") : testInfo.title;
      await annotate(page, label);
    }
    await use(page);
  }
});
|
|
24
|
+
//#endregion
|
|
25
|
+
//#region src/index.ts
// Eagerly run the lazy module initializers so registry/video-script state
// exists before any fixture or helper touches it.
init_video_script();
init_hud_registry();
//#endregion
|
|
29
|
+
export { AudioWriter, annotate, applyHud, buildFfmpegCommand, caption, clickEl, createVideoScript, expect, getGlobalTtsProvider, hudWait, installAutoAnnotate, moveTo, moveToEl, narrate, subtitle, test, typeKeys };
|
|
@@ -0,0 +1,652 @@
|
|
|
1
|
+
import { r as __toCommonJS } from "./chunk-C0p4GxOx.mjs";
|
|
2
|
+
import { c as registerHudPage, i as getRenderJob, l as setGlobalOutputDir, o as init_hud_registry, t as getAudioSegments } from "./hud-registry-CptkyV32.mjs";
|
|
3
|
+
import { n as video_script_exports, t as init_video_script } from "./video-script.mjs";
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
import { existsSync, mkdirSync, unlinkSync, writeFileSync } from "node:fs";
|
|
6
|
+
import { join } from "node:path";
|
|
7
|
+
//#region src/hud-overlay.ts
|
|
8
|
+
/**
 * Build the source string passed to context.addInitScript().
 * Serializes listenerMain into a self-invoking expression. The listener
 * only wires event handlers onto window.__qaHud and performs no DOM
 * mutations, so it is safe to run before document.body exists.
 */
function generateListenerScript() {
  const body = listenerMain.toString();
  return `(${body})();`;
}
|
|
16
|
+
/**
 * Accessor for the overlay-building function.
 * The returned function is meant to be run via page.evaluate(fn, opts)
 * after each navigation; it creates the HUD DOM and binds it to the state
 * installed by the listener script.
 */
function getDomInjector() {
  return domInjector;
}
|
|
23
|
+
/**
 * Browser-side bootstrap (serialized via toString and injected as an init
 * script). Installs capture-phase input listeners and publishes shared
 * state on window.__qaHud; the DOM injector later attaches the on* hooks.
 */
function listenerMain() {
  // Idempotent: a previous init-script run already installed everything.
  if (window.__qaHud) return;
  const MODIFIERS = new Set(["Shift", "Control", "Alt", "Meta", "CapsLock"]);
  // Cursor starts off-screen; callbacks stay null until the overlay exists.
  const state = {
    cx: -40,
    cy: -40,
    onCursorMove: null,
    onMouseDown: null,
    onMouseUp: null,
    onKeyDown: null,
    onKeyUp: null,
    onAnnotate: null
  };
  window.__qaHud = state;
  // Render "Ctrl+Shift+K"-style labels; a bare modifier passes through as-is.
  function formatKey(e) {
    if (MODIFIERS.has(e.key)) return e.key;
    const parts = [];
    if (e.ctrlKey) parts.push("Ctrl");
    if (e.altKey) parts.push("Alt");
    if (e.shiftKey) parts.push("Shift");
    if (e.metaKey) parts.push("Meta");
    parts.push(e.key === " " ? "Space" : e.key);
    return parts.join("+");
  }
  // All listeners use the capture phase so the page cannot swallow events.
  const listen = (type, handler) => document.addEventListener(type, handler, true);
  listen("mousemove", (e) => {
    state.cx = e.clientX;
    state.cy = e.clientY;
    state.onCursorMove?.(e.clientX, e.clientY);
  });
  listen("mousedown", (e) => state.onMouseDown?.(e.clientX, e.clientY));
  listen("mouseup", () => state.onMouseUp?.());
  listen("keydown", (e) => state.onKeyDown?.(formatKey(e), MODIFIERS.has(e.key)));
  listen("keyup", (e) => {
    // Only modifier release matters: their keycaps persist until keyup.
    if (MODIFIERS.has(e.key)) state.onKeyUp?.(e.key);
  });
}
|
|
67
|
+
/**
 * Builds the HUD overlay DOM inside the current document and wires it to
 * the state object installed by listenerMain (window.__qaHud).
 * Runs via page.evaluate after each navigation; idempotent per document.
 *
 * @param opts { cursor, keyboard, cursorStyle, keyFadeMs } — display options
 *             baked into the generated stylesheet.
 */
function domInjector(opts) {
  // Already injected into this document — nothing to do.
  if (document.querySelector("[data-qa-hud]")) return;
  const state = window.__qaHud;
  // Listener script did not run (e.g. injected into a foreign frame) — bail.
  if (!state) return;
  // Zero-size fixed host keeps overlay children out of layout and clicks.
  const host = document.createElement("div");
  host.setAttribute("data-qa-hud", "");
  host.style.cssText = "position:fixed;top:0;left:0;width:0;height:0;z-index:2147483647;pointer-events:none;";
  document.body.appendChild(host);
  // Built-in cursor glyphs; keys must match the cursorStyle option values.
  const cursorSvgs = {
    default: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M5 2l14 14h-7.5L16 22l-3 1-4.5-6.5L3 21z" fill="#fff" stroke="#000" stroke-width="1.2"/></svg>`,
    dot: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><circle cx="12" cy="12" r="6" fill="red" stroke="#fff" stroke-width="2"/></svg>`,
    crosshair: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><circle cx="12" cy="12" r="8" fill="none" stroke="red" stroke-width="2"/><line x1="12" y1="2" x2="12" y2="22" stroke="red" stroke-width="1.5"/><line x1="2" y1="12" x2="22" y2="12" stroke="red" stroke-width="1.5"/></svg>`
  };
  // Stylesheet is generated per-injection so option values (display toggles,
  // key fade duration) are baked in as literals.
  const styleEl = document.createElement("style");
  styleEl.textContent = `
[data-qa-hud] * { pointer-events: none !important; }
.qa-cursor {
position: fixed; top: 0; left: 0;
width: 20px; height: 20px;
pointer-events: none; z-index: 2147483647;
transition: transform 0.02s linear;
will-change: transform;
display: ${opts.cursor ? "block" : "none"};
}
.qa-cursor svg { width: 20px; height: 20px; filter: drop-shadow(1px 1px 1px rgba(0,0,0,0.5)); }
.qa-cursor.clicking svg { transform: scale(0.85); }
.qa-ripple {
position: fixed; width: 20px; height: 20px;
border-radius: 50%; border: 2px solid rgba(255, 60, 60, 0.8);
pointer-events: none;
animation: qa-ripple-anim 0.5s ease-out forwards;
z-index: 2147483646;
}
@keyframes qa-ripple-anim {
0% { transform: translate(-50%,-50%) scale(0.5); opacity: 1; }
100% { transform: translate(-50%,-50%) scale(3); opacity: 0; }
}
.qa-keys {
position: fixed; bottom: 20px; left: 50%;
transform: translateX(-50%);
display: ${opts.keyboard ? "flex" : "none"};
gap: 6px; flex-wrap: wrap; justify-content: center;
max-width: 80vw; pointer-events: none; z-index: 2147483647;
}
.qa-key {
background: rgba(0,0,0,0.75); color: #fff;
font-family: ui-monospace, "SF Mono", Menlo, monospace;
font-size: 14px; line-height: 1;
padding: 5px 10px; border-radius: 5px;
border: 1px solid rgba(255,255,255,0.3);
white-space: nowrap;
animation: qa-key-fade ${opts.keyFadeMs}ms ease-out forwards;
box-shadow: 0 2px 6px rgba(0,0,0,0.4);
}
.qa-key.modifier {
background: rgba(60, 120, 255, 0.8);
animation: none;
}
@keyframes qa-key-fade {
0% { opacity: 1; transform: translateY(0); }
70% { opacity: 1; transform: translateY(0); }
100% { opacity: 0; transform: translateY(-10px); }
}
.qa-subtitle {
position: fixed; bottom: 60px; left: 50%;
transform: translateX(-50%);
max-width: 80vw; text-align: center;
pointer-events: none; z-index: 2147483647;
}
.qa-subtitle-text {
display: inline-block;
background: rgba(0,0,0,0.8); color: #fff;
font-family: system-ui, -apple-system, sans-serif;
font-size: 18px; line-height: 1.4;
padding: 8px 18px; border-radius: 8px;
box-shadow: 0 2px 10px rgba(0,0,0,0.5);
animation: qa-subtitle-fade var(--qa-subtitle-ms, 3000ms) ease-out forwards;
}
@keyframes qa-subtitle-fade {
0% { opacity: 1; }
80% { opacity: 1; }
100% { opacity: 0; }
}
`;
  host.appendChild(styleEl);
  // Synthetic cursor, positioned at the last known pointer location.
  const cursorEl = document.createElement("div");
  cursorEl.className = "qa-cursor";
  cursorEl.innerHTML = cursorSvgs[opts.cursorStyle] || cursorSvgs.default;
  cursorEl.style.transform = `translate(${state.cx}px, ${state.cy}px)`;
  host.appendChild(cursorEl);
  // Container for transient keycap badges.
  const keysEl = document.createElement("div");
  keysEl.className = "qa-keys";
  host.appendChild(keysEl);
  // Container for subtitle/annotation text.
  const subtitleEl = document.createElement("div");
  subtitleEl.className = "qa-subtitle";
  host.appendChild(subtitleEl);
  // Currently-held modifier keycaps, keyed by label (e.g. "Shift").
  const activeModifiers = /* @__PURE__ */ new Map();
  // Hook up listener-state callbacks to the freshly created elements.
  state.onCursorMove = (x, y) => {
    cursorEl.style.transform = `translate(${x}px, ${y}px)`;
  };
  state.onMouseDown = (x, y) => {
    cursorEl.classList.add("clicking");
    // Spawn a one-shot click ripple that removes itself after animating.
    const ripple = document.createElement("div");
    ripple.className = "qa-ripple";
    ripple.style.left = x + "px";
    ripple.style.top = y + "px";
    host.appendChild(ripple);
    ripple.addEventListener("animationend", () => ripple.remove());
  };
  state.onMouseUp = () => {
    cursorEl.classList.remove("clicking");
  };
  state.onKeyDown = (label, isModifier) => {
    if (isModifier) {
      // Modifiers persist (no fade animation) until the matching keyup.
      if (!activeModifiers.has(label)) {
        const el = document.createElement("div");
        el.className = "qa-key modifier";
        el.textContent = label;
        keysEl.prepend(el);
        activeModifiers.set(label, el);
      }
      return;
    }
    // Regular keys fade out via CSS and remove themselves afterwards.
    const el = document.createElement("div");
    el.className = "qa-key";
    el.textContent = label;
    keysEl.appendChild(el);
    el.addEventListener("animationend", () => el.remove());
  };
  state.onKeyUp = (key) => {
    const el = activeModifiers.get(key);
    if (el) {
      el.remove();
      activeModifiers.delete(key);
    }
  };
  state.onAnnotate = (text, durationMs) => {
    // Only one annotation is visible at a time: clear any previous text.
    const el = document.createElement("div");
    el.className = "qa-subtitle-text";
    el.style.setProperty("--qa-subtitle-ms", durationMs + "ms");
    el.textContent = text;
    subtitleEl.innerHTML = "";
    subtitleEl.appendChild(el);
    el.addEventListener("animationend", () => el.remove());
  };
}
|
|
213
|
+
//#endregion
|
|
214
|
+
//#region src/audio-capture.ts
|
|
215
|
+
/**
 * Build the browser-side audio capture source for addInitScript.
 *
 * Serializes audioCaptureMain into a self-invoking expression; once
 * injected it patches AudioContext routing so all audio headed to
 * ctx.destination is tapped and streamed to Node as PCM float32 chunks
 * through the exposed __qaHudAudioChunk binding.
 */
function generateAudioCaptureScript() {
  const body = audioCaptureMain.toString();
  return `(${body})();`;
}
|
|
226
|
+
/**
 * Browser-side audio tap (serialized via toString and injected as an init
 * script). Patches AudioNode.connect/disconnect so anything routed to an
 * AudioDestinationNode is detoured through a gain→ScriptProcessor pair;
 * the processor passes audio through unchanged while forwarding PCM
 * chunks to Node via window.__qaHudAudioChunk (set up by exposeFunction).
 * Also patches HTMLMediaElement.play so <audio>/<video> output is routed
 * into Web Audio and therefore captured too.
 */
function audioCaptureMain() {
  // Idempotent per window.
  if (window.__qaHudAudioCapture) return;
  window.__qaHudAudioCapture = true;
  const BUFFER_SIZE = 4096;
  // Keep the unpatched methods for internal wiring and pass-through calls.
  const origConnect = AudioNode.prototype.connect;
  const origDisconnect = AudioNode.prototype.disconnect;
  // One interceptor chain per destination node; WeakMap so GC can reclaim.
  const interceptors = /* @__PURE__ */ new WeakMap();
  // Lazily create (or reuse) the gain→processor→destination tap for dest.
  function getInterceptor(ctx, dest) {
    let gain = interceptors.get(dest);
    if (gain) return gain;
    gain = ctx.createGain();
    const processor = ctx.createScriptProcessor(BUFFER_SIZE, 2, 2);
    processor.onaudioprocess = (e) => {
      const left = e.inputBuffer.getChannelData(0);
      // Mono input: reuse the left channel for the right.
      const right = e.inputBuffer.numberOfChannels > 1 ? e.inputBuffer.getChannelData(1) : left;
      // Pass audio through unchanged so playback is unaffected.
      e.outputBuffer.getChannelData(0).set(left);
      if (e.outputBuffer.numberOfChannels > 1) e.outputBuffer.getChannelData(1).set(right);
      // Forward an interleaved stereo copy to Node, if the bridge exists.
      const send = window.__qaHudAudioChunk;
      if (typeof send === "function") {
        const interleaved = new Float32Array(left.length * 2);
        for (let i = 0; i < left.length; i++) {
          interleaved[i * 2] = left[i];
          interleaved[i * 2 + 1] = right[i];
        }
        send(Array.from(interleaved), ctx.sampleRate);
      }
    };
    // Wire with the ORIGINAL connect so the tap itself is not re-intercepted.
    origConnect.call(gain, processor);
    origConnect.call(processor, dest);
    interceptors.set(dest, gain);
    return gain;
  }
  // Redirect any connect(destination) through the interceptor chain.
  AudioNode.prototype.connect = function(dest, output, input) {
    if (dest instanceof AudioDestinationNode) {
      const gain = getInterceptor(dest.context, dest);
      return origConnect.call(this, gain, output, input);
    }
    return origConnect.call(this, dest, output, input);
  };
  // Mirror the redirect for disconnect(destination).
  AudioNode.prototype.disconnect = function(dest) {
    if (dest instanceof AudioDestinationNode) {
      const gain = interceptors.get(dest);
      if (gain) return origDisconnect.call(this, gain);
    }
    return origDisconnect.call(this, dest);
  };
  // Route media elements into Web Audio on first play so they get tapped.
  const mediaElements = /* @__PURE__ */ new WeakSet();
  const origPlay = HTMLMediaElement.prototype.play;
  HTMLMediaElement.prototype.play = function() {
    if (!mediaElements.has(this)) {
      mediaElements.add(this);
      try {
        const ctx = new AudioContext();
        // connect() here is the patched version, so this goes via the tap.
        // Best-effort: createMediaElementSource throws if already attached
        // or for cross-origin media.
        ctx.createMediaElementSource(this).connect(ctx.destination);
      } catch {}
    }
    return origPlay.call(this);
  };
}
|
|
285
|
+
//#endregion
|
|
286
|
+
//#region src/audio-writer.ts
|
|
287
|
+
/**
 * Node-side WAV writer fed by the browser audio-capture script.
 *
 * Accumulates interleaved stereo Float32 PCM chunks in memory and can
 * serialize the collected audio as a 16-bit little-endian PCM WAV file.
 */
var AudioWriter = class {
  // Raw interleaved Float32 chunks, in arrival order.
  chunks = [];
  // Sample rate (Hz) reported with the most recent chunk.
  sampleRate = 44100;
  // Interleaved channel count; the capture script always sends stereo.
  channels = 2;
  /**
   * Receiver for page.exposeFunction: appends one interleaved stereo
   * float32 chunk and records its sample rate.
   */
  addChunk(samples, sampleRate) {
    this.sampleRate = sampleRate;
    this.chunks.push(new Float32Array(samples));
  }
  /** Total interleaved sample count (divide by channels for per-channel). */
  get totalSamples() {
    let count = 0;
    for (const chunk of this.chunks) count += chunk.length;
    return count;
  }
  /** Captured duration in seconds. */
  get duration() {
    return this.totalSamples / this.channels / this.sampleRate;
  }
  /**
   * Encode everything collected so far as a PCM16 WAV at filePath.
   * No-op when nothing has been captured.
   */
  save(filePath) {
    const sampleCount = this.totalSamples;
    if (sampleCount === 0) return;
    // Clamp to [-1, 1] and quantize to signed 16-bit.
    const pcm = new Int16Array(sampleCount);
    let cursor = 0;
    for (const chunk of this.chunks) {
      for (const sample of chunk) {
        const clamped = Math.min(1, Math.max(-1, sample));
        pcm[cursor++] = clamped < 0 ? clamped * 32768 : clamped * 32767;
      }
    }
    // Standard 44-byte RIFF/WAVE header followed by the PCM payload.
    const dataBytes = pcm.length * 2;
    const out = Buffer.alloc(44 + dataBytes);
    out.write("RIFF", 0);
    out.writeUInt32LE(36 + dataBytes, 4);
    out.write("WAVE", 8);
    out.write("fmt ", 12);
    out.writeUInt32LE(16, 16);                              // fmt chunk size
    out.writeUInt16LE(1, 20);                               // format: PCM
    out.writeUInt16LE(this.channels, 22);
    out.writeUInt32LE(this.sampleRate, 24);
    out.writeUInt32LE(this.sampleRate * this.channels * 2, 28); // byte rate
    out.writeUInt16LE(this.channels * 2, 32);               // block align
    out.writeUInt16LE(16, 34);                              // bits/sample
    out.write("data", 36);
    out.writeUInt32LE(dataBytes, 40);
    Buffer.from(pcm.buffer).copy(out, 44);
    writeFileSync(filePath, out);
  }
  /** Drop all collected chunks so the writer can be reused. */
  clear() {
    this.chunks = [];
  }
};
|
|
347
|
+
//#endregion
|
|
348
|
+
//#region src/setup.ts
|
|
349
|
+
/**
 * Core HUD setup logic — shared by all integration approaches.
 */
// Make sure the page/TTS registry module has initialized before use.
init_hud_registry();
// Baseline configuration; applyHud() shallow-merges user options over it.
const defaultOptions = {
  // Show the synthetic cursor overlay.
  cursor: true,
  // Show the on-screen keypress badges.
  keyboard: true,
  // One of the built-in cursor SVGs: "default" | "dot" | "crosshair".
  cursorStyle: "default",
  // How long a non-modifier keycap stays visible (ms).
  keyFadeMs: 1500,
  // Pause inserted after each patched page/keyboard action (ms); 0 disables.
  actionDelay: 120,
  // false | true | "<path>.wav" — enable audio capture; a string sets the output path.
  audio: false,
  // Enable TTS plumbing when registering pages.
  tts: false,
  // Automatically annotate each test with its title (fixture integration).
  autoAnnotate: false,
  // Root directory (relative to cwd) for generated artifacts.
  outputDir: ".demowright"
};
|
|
364
|
+
/**
 * Apply the demowright HUD to an existing BrowserContext.
 *
 * Installs the listener/audio init scripts, wires every current and future
 * page (overlay injection, action delays, audio bridge), and — when audio
 * is enabled — registers a context "close" handler that assembles the
 * final audio track and muxes it with the recorded video via ffmpeg.
 *
 * @param context  Playwright BrowserContext to instrument.
 * @param options  Partial HUD options; merged over defaultOptions.
 * @returns AudioWriter if audio capture is enabled (call .save() after the
 *          test), otherwise undefined.
 */
async function applyHud(context, options) {
  const opts = {
    ...defaultOptions,
    ...options
  };
  setGlobalOutputDir(opts.outputDir);
  // Wall-clock anchor used to position audio relative to video start.
  const contextStartMs = Date.now();
  await context.addInitScript(generateListenerScript());
  let audioWriter;
  if (opts.audio) {
    audioWriter = new AudioWriter();
    await context.addInitScript(generateAudioCaptureScript());
  }
  // Subset of options the DOM injector needs inside the page.
  const hudOpts = {
    cursor: opts.cursor,
    keyboard: opts.keyboard,
    cursorStyle: opts.cursorStyle,
    keyFadeMs: opts.keyFadeMs
  };
  const domInjector = getDomInjector();
  // Video files discovered from pages; used for muxing on close.
  const videoPaths = [];
  // Per-page instrumentation, applied to existing and future pages alike.
  async function setupPage(page) {
    registerHudPage(page, { tts: opts.tts });
    wrapNavigation(page, domInjector, hudOpts);
    if (opts.actionDelay > 0) patchPageDelay(page, opts.actionDelay);
    if (audioWriter) {
      await setupAudioCapture(page, audioWriter);
      // Best-effort: record the page's video path if video is being captured.
      try {
        const vp = await page.video()?.path();
        if (vp) videoPaths.push(vp);
      } catch {}
    }
  }
  for (const page of context.pages()) await setupPage(page);
  context.on("page", (page) => setupPage(page));
  if (audioWriter && opts.audio) {
    const outDir = join(process.cwd(), opts.outputDir);
    const tmpDir = join(outDir, "tmp");
    mkdirSync(tmpDir, { recursive: true });
    // opts.audio may be an explicit .wav path; otherwise use a temp name.
    const audioPath = typeof opts.audio === "string" ? opts.audio : join(tmpDir, `demowright-audio-${Date.now()}.wav`);
    // Track every page ever opened so segments/jobs can be collected later.
    const allPages = [...context.pages()];
    context.on("page", (pg) => allPages.push(pg));
    context.on("close", () => {
      // A video-script render job takes precedence: only the first page
      // with a job is finalized, then we stop.
      for (const pg of allPages) {
        const job = getRenderJob(pg);
        if (job) {
          finalizeRenderJob(job, videoPaths);
          return;
        }
      }
      // Otherwise fall back to TTS segments, then raw captured audio.
      const segments = [];
      for (const pg of allPages) segments.push(...getAudioSegments(pg));
      let audioOffsetMs = 0;
      if (segments.length > 0) {
        const firstSegMs = segments[0].timestampMs;
        // Delay between context start and first narration; trimmed off the video.
        audioOffsetMs = firstSegMs - contextStartMs;
        buildAndSaveAudioTrack(segments, audioPath, firstSegMs);
      } else if (audioWriter.totalSamples > 0) audioWriter.save(audioPath);
      else return;
      // NOTE(review): path split on "/" assumes POSIX separators — on
      // Windows join() produces "\" paths; verify before relying on this.
      const mp4Path = join(outDir, audioPath.split("/").pop().replace(/\.wav$/, ".mp4"));
      const trimSec = (audioOffsetMs / 1e3).toFixed(3);
      let muxed = false;
      // Try each known video file until one muxes successfully.
      for (const videoPath of videoPaths) try {
        if (!existsSync(videoPath)) continue;
        execSync(`ffmpeg -y -ss ${trimSec} -i "${videoPath}" -i "${audioPath}" -c:v libx264 -preset fast -c:a aac -shortest "${mp4Path}"`, { stdio: "pipe" });
        muxed = true;
        // Best-effort cleanup of the intermediate wav.
        try {
          unlinkSync(audioPath);
        } catch {}
        console.log(`[demowright] ✓ Rendered: ${mp4Path}`);
      } catch {}
      // ffmpeg missing or all attempts failed: keep the wav and print the
      // command the user can run manually.
      if (!muxed) {
        console.log(`[demowright] Audio saved: ${audioPath}`);
        console.log(`[demowright] Mux: ffmpeg -y -ss ${trimSec} -i <video.webm> -i "${audioPath}" -c:v libx264 -preset fast -c:a aac -shortest "${mp4Path}"`);
      }
    });
  }
  return audioWriter;
}
|
|
447
|
+
/**
 * Monkey-patches a page's navigation methods (goto, reload, setContent,
 * goBack, goForward) so the HUD overlay DOM is re-injected after every
 * navigation completes. Injection errors are swallowed — the page may
 * have closed or navigated again mid-flight.
 */
function wrapNavigation(page, domInjector, hudOpts) {
  const injectDom = async () => {
    try {
      if (page.isClosed()) return;
      await page.evaluate(domInjector, hudOpts);
    } catch {}
  };
  for (const name of ["goto", "reload", "setContent", "goBack", "goForward"]) {
    const original = page[name].bind(page);
    page[name] = async (...args) => {
      const result = await original(...args);
      await injectDom();
      return result;
    };
  }
}
|
|
484
|
+
/**
 * Inserts a fixed pause after common user-facing actions so recorded
 * videos have visible pacing. Patches both the page's action methods and
 * its keyboard's input methods in place; absent methods are skipped.
 *
 * @param page  Playwright Page (or compatible object) to patch.
 * @param delay Milliseconds to wait after each action completes.
 */
function patchPageDelay(page, delay) {
  // Replace target[name] with a wrapper that awaits the original, then
  // pauses via page.waitForTimeout before returning the original result.
  const addTrailingDelay = (target, name) => {
    const original = target[name];
    if (typeof original !== "function") return;
    target[name] = async function(...args) {
      const result = await original.apply(this, args);
      await page.waitForTimeout(delay);
      return result;
    };
  };
  const pageMethods = [
    "click",
    "dblclick",
    "fill",
    "press",
    "type",
    "check",
    "uncheck",
    "selectOption",
    "hover",
    "tap",
    "dragAndDrop"
  ];
  for (const name of pageMethods) addTrailingDelay(page, name);
  for (const name of ["press", "type", "insertText"]) addTrailingDelay(page.keyboard, name);
}
|
|
519
|
+
/**
 * Bridge browser → Node audio transport: exposes __qaHudAudioChunk on the
 * page so the injected capture script can stream interleaved PCM chunks
 * into the writer. Failures (e.g. the binding already registered on this
 * page) are deliberately ignored.
 */
async function setupAudioCapture(page, writer) {
  const receive = (samples, sampleRate) => writer.addChunk(samples, sampleRate);
  try {
    await page.exposeFunction("__qaHudAudioChunk", receive);
  } catch {}
}
|
|
530
|
+
/**
 * Build a single stereo PCM16 WAV track from stored TTS segments, placing
 * each segment at its wall-clock offset (seg.timestampMs - contextStartMs)
 * with silence filling the gaps. Overlapping segments are mixed by
 * summation, then the mix is clamped to [-1, 1] and quantized.
 *
 * The output sample rate is taken from the first segment's WAV header;
 * other segments are NOT resampled — assumes all segments share one
 * sample rate (TODO confirm with the TTS provider).
 *
 * @param segments       [{ wavBuf: Buffer (RIFF/WAVE PCM16), timestampMs }]
 * @param outputPath     destination .wav path
 * @param contextStartMs epoch ms used as the track's t=0 (the caller passes
 *                       the first segment's timestamp so it lands at 0)
 */
function buildAndSaveAudioTrack(segments, outputPath, contextStartMs) {
  if (segments.length === 0) return;
  const firstBuf = segments[0].wavBuf;
  // No "data" chunk marker → cannot locate PCM payloads; bail out.
  if (firstBuf.indexOf("data") < 0) return;
  const sampleRate = firstBuf.readUInt32LE(24);
  const channels = 2;
  const baseMs = contextStartMs;
  // First pass: track length = the latest segment end time.
  let totalMs = 0;
  for (const seg of segments) {
    const dOff = seg.wavBuf.indexOf("data") + 8;
    if (dOff < 8) continue;
    const sr = seg.wavBuf.readUInt32LE(24);
    const ch = seg.wavBuf.readUInt16LE(22);
    // bytes / 2 (PCM16) / channels / rate → seconds → ms.
    const segDur = seg.wavBuf.subarray(dOff).length / 2 / ch / sr * 1e3;
    const endMs = seg.timestampMs - baseMs + segDur;
    if (endMs > totalMs) totalMs = endMs;
  }
  const totalSamples = Math.ceil(totalMs / 1e3 * sampleRate * channels);
  const trackBuffer = new Float32Array(totalSamples);
  // Second pass: decode each segment to stereo float32 and mix it in.
  for (const seg of segments) {
    const dOff = seg.wavBuf.indexOf("data") + 8;
    if (dOff < 8) continue;
    const ch = seg.wavBuf.readUInt16LE(22);
    const pcmData = seg.wavBuf.subarray(dOff);
    const sampleCount = pcmData.length / 2;
    const float32 = new Float32Array(sampleCount);
    for (let i = 0; i < sampleCount; i++) float32[i] = pcmData.readInt16LE(i * 2) / 32768;
    // Mono segments get duplicated into both channels.
    let stereo;
    if (ch === 1) {
      stereo = new Float32Array(sampleCount * 2);
      for (let i = 0; i < sampleCount; i++) {
        stereo[i * 2] = float32[i];
        stereo[i * 2 + 1] = float32[i];
      }
    } else {
      stereo = float32;
    }
    const offsetMs = seg.timestampMs - baseMs;
    // Align to a whole frame so L/R interleaving stays in phase.
    const offsetSamples = Math.floor(offsetMs / 1e3 * sampleRate) * channels;
    for (let i = 0; i < stereo.length && offsetSamples + i < trackBuffer.length; i++) trackBuffer[offsetSamples + i] += stereo[i];
  }
  // Clamp the float mix and quantize to signed 16-bit.
  const int16 = new Int16Array(trackBuffer.length);
  for (let i = 0; i < trackBuffer.length; i++) {
    const s = Math.max(-1, Math.min(1, trackBuffer[i]));
    int16[i] = s < 0 ? s * 32768 : s * 32767;
  }
  // Standard 44-byte RIFF/WAVE header for PCM16 audio.
  const dataBytes = int16.length * 2;
  const buffer = Buffer.alloc(44 + dataBytes);
  buffer.write("RIFF", 0);
  buffer.writeUInt32LE(36 + dataBytes, 4);
  buffer.write("WAVE", 8);
  buffer.write("fmt ", 12);
  buffer.writeUInt32LE(16, 16);                            // fmt chunk size
  buffer.writeUInt16LE(1, 20);                             // format: PCM
  buffer.writeUInt16LE(channels, 22);
  buffer.writeUInt32LE(sampleRate, 24);
  buffer.writeUInt32LE(sampleRate * channels * 2, 28);     // byte rate
  buffer.writeUInt16LE(channels * 2, 32);                  // block align
  buffer.writeUInt16LE(16, 34);                            // bits/sample
  buffer.write("data", 36);
  buffer.writeUInt32LE(dataBytes, 40);
  Buffer.from(int16.buffer).copy(buffer, 44);
  writeFileSync(outputPath, buffer);
}
|
|
598
|
+
/**
 * Finalize a video render job: run ffmpeg with the actual recorded video,
 * applying fade transitions from the job timeline, subtitle burn-in from
 * the job's SRT file, and chapter metadata. Tries each candidate video
 * path in order; on the first success the intermediate files are deleted
 * and the function returns. If every attempt fails, prints a manual
 * ffmpeg command the user can run themselves.
 */
function finalizeRenderJob(job, videoPaths) {
  for (const videoPath of videoPaths) try {
    if (!existsSync(videoPath)) continue;
    const filters = [];
    // Each timeline transition becomes a fade-out at its start plus a
    // fade-in once the transition window ends.
    const transitions = job.timeline.filter((e) => e.kind === "transition");
    for (const t of transitions) {
      const startSec = (t.startMs / 1e3).toFixed(3);
      const durSec = (t.durationMs / 1e3).toFixed(3);
      const endSec = ((t.startMs + t.durationMs) / 1e3).toFixed(3);
      filters.push(`fade=t=out:st=${startSec}:d=${durSec}`);
      filters.push(`fade=t=in:st=${endSec}:d=${durSec}`);
    }
    if (existsSync(job.srtPath)) {
      // Escape for the ffmpeg subtitles filter: forward slashes, escaped
      // colons, and single quotes safe inside a single-quoted shell string.
      const escapedSrt = job.srtPath.replace(/\\/g, "/").replace(/:/g, "\\\\:").replace(/'/g, "'\\''");
      filters.push(`subtitles='${escapedSrt}'`);
    }
    const vf = filters.length > 0 ? `-vf "${filters.join(",")}"` : "";
    // Chapter metadata (when present) comes in as a third input, index 2.
    const chapterArgs = existsSync(job.chaptersPath) ? `-i "${job.chaptersPath}" -map_metadata 2` : "";
    execSync([
      `ffmpeg -y`,
      `-i "${videoPath}"`,
      `-i "${job.wavPath}"`,
      chapterArgs,
      vf,
      `-c:v libx264 -preset fast`,
      `-c:a aac`,
      `-shortest`,
      `"${job.mp4Path}"`
    ].filter(Boolean).join(" "), { stdio: "pipe" });
    // Success: best-effort cleanup of the intermediate artifacts.
    for (const f of [
      job.wavPath,
      job.srtPath,
      job.chaptersPath
    ]) try {
      unlinkSync(f);
    } catch {}
    console.log(`[demowright] ✓ Rendered: ${job.mp4Path}`);
    return;
  } catch (e) {
    console.log(`[demowright] ffmpeg failed: ${e.message}`);
  }
  // All attempts failed (or ffmpeg is missing): print the equivalent
  // command so the user can mux manually.
  if (videoPaths.length > 0) {
    // Force the video-script module to initialize, then grab its exports.
    const { buildFfmpegCommand } = (init_video_script(), __toCommonJS(video_script_exports));
    if (typeof buildFfmpegCommand === "function") {
      const cmd = buildFfmpegCommand(videoPaths[0] ?? "<video.webm>", job.wavPath, job.srtPath, job.chaptersPath, job.mp4Path, job.timeline);
      console.log(`[demowright] Run manually:\n${cmd}`);
    }
  }
}
|
|
651
|
+
//#endregion
|
|
652
|
+
export { defaultOptions as n, AudioWriter as r, applyHud as t };
|