@viji-dev/core 0.2.6 → 0.2.8
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +135 -2
- package/dist/artist-dts.js +1 -1
- package/dist/artist-global.d.ts +151 -6
- package/dist/{artist-jsdoc.js → artist-js-ambient.d.ts} +78 -6
- package/dist/artist-jsdoc.d.ts +66 -0
- package/dist/assets/P5WorkerAdapter-bO_02bv6.js +345 -0
- package/dist/assets/P5WorkerAdapter-bO_02bv6.js.map +1 -0
- package/dist/assets/cv-tasks.worker.js +623 -0
- package/dist/assets/p5.min-BBA6UiVb.js +16810 -0
- package/dist/assets/p5.min-BBA6UiVb.js.map +1 -0
- package/dist/assets/viji.worker-BjMgRS7D.js +2150 -0
- package/dist/assets/viji.worker-BjMgRS7D.js.map +1 -0
- package/dist/assets/vision_bundle.js +2 -0
- package/dist/assets/wasm/vision_wasm_internal.js +20 -0
- package/dist/assets/wasm/vision_wasm_internal.wasm +0 -0
- package/dist/assets/wasm/vision_wasm_nosimd_internal.js +20 -0
- package/dist/assets/wasm/vision_wasm_nosimd_internal.wasm +0 -0
- package/dist/index.d.ts +182 -13
- package/dist/index.js +104 -22
- package/dist/index.js.map +1 -1
- package/package.json +15 -8
- package/dist/assets/viji.worker-BKsgIT1d.js +0 -1428
- package/dist/assets/viji.worker-BKsgIT1d.js.map +0 -1
package/dist/assets/viji.worker-BjMgRS7D.js
@@ -0,0 +1,2150 @@
class ParameterSystem {
  // Parameter system for Phase 2 (new object-based approach)
  parameterDefinitions = /* @__PURE__ */ new Map();
  parameterGroups = /* @__PURE__ */ new Map();
  parameterValues = /* @__PURE__ */ new Map();
  parameterObjects = /* @__PURE__ */ new Map();
  // Maps parameter names to their objects
  parametersDefined = false;
  initialValuesSynced = false;
  // Track if initial values have been synced from host
  // Debug logging control
  debugMode = false;
  /**
   * Enable or disable debug logging
   */
  setDebugMode(enabled) {
    this.debugMode = enabled;
  }
  /**
   * Debug logging helper
   */
  debugLog(message, ...args) {
    if (this.debugMode) {
      console.log(message, ...args);
    }
  }
  // Message posting callback
  postMessageCallback;
  constructor(postMessageCallback) {
    this.postMessageCallback = postMessageCallback;
  }
  // Parameter helper function implementations (return parameter objects)
  createSliderParameter(defaultValue, config) {
    const paramName = config.label;
    const sliderObject = {
      value: defaultValue,
      min: config.min ?? 0,
      max: config.max ?? 100,
      step: config.step ?? 1,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "slider",
      defaultValue,
      label: sliderObject.label,
      description: sliderObject.description,
      group: sliderObject.group,
      category: sliderObject.category,
      config: {
        min: sliderObject.min,
        max: sliderObject.max,
        step: sliderObject.step
      }
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, sliderObject);
    return sliderObject;
  }
  createColorParameter(defaultValue, config) {
    const paramName = config.label;
    const colorObject = {
      value: defaultValue,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "color",
      defaultValue,
      label: colorObject.label,
      description: colorObject.description,
      group: colorObject.group,
      category: colorObject.category
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, colorObject);
    return colorObject;
  }
  createToggleParameter(defaultValue, config) {
    const paramName = config.label;
    const toggleObject = {
      value: defaultValue,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "toggle",
      defaultValue,
      label: toggleObject.label,
      description: toggleObject.description,
      group: toggleObject.group,
      category: toggleObject.category
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, toggleObject);
    return toggleObject;
  }
  createSelectParameter(defaultValue, config) {
    const paramName = config.label;
    const selectObject = {
      value: defaultValue,
      options: config.options,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "select",
      defaultValue,
      label: selectObject.label,
      description: selectObject.description,
      group: selectObject.group,
      category: selectObject.category,
      config: {
        options: selectObject.options
      }
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, selectObject);
    return selectObject;
  }
  createTextParameter(defaultValue, config) {
    const paramName = config.label;
    const textObject = {
      value: defaultValue,
      maxLength: config.maxLength ?? 1e3,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "text",
      defaultValue,
      label: textObject.label,
      description: textObject.description,
      group: textObject.group,
      category: textObject.category,
      config: {
        maxLength: textObject.maxLength
      }
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, textObject);
    return textObject;
  }
  createNumberParameter(defaultValue, config) {
    const paramName = config.label;
    const numberObject = {
      value: defaultValue,
      min: config.min ?? 0,
      max: config.max ?? 100,
      step: config.step ?? 1,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "number",
      defaultValue,
      label: numberObject.label,
      description: numberObject.description,
      group: numberObject.group,
      category: numberObject.category,
      config: {
        min: numberObject.min,
        max: numberObject.max,
        step: numberObject.step
      }
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, numberObject);
    return numberObject;
  }
  createImageParameter(defaultValue, config) {
    const paramName = config.label;
    const imageObject = {
      value: defaultValue,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type: "image",
      defaultValue,
      label: imageObject.label,
      description: imageObject.description,
      group: imageObject.group,
      category: imageObject.category
    };
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, imageObject);
    return imageObject;
  }
  storeParameterDefinition(name, definition) {
    this.parameterDefinitions.set(name, definition);
    this.parameterValues.set(name, definition.defaultValue);
  }
  updateParameterValue(name, value) {
    const definition = this.parameterDefinitions.get(name);
    if (!definition) {
      console.warn(`Unknown parameter: ${name}. Available parameters:`, Array.from(this.parameterDefinitions.keys()));
      return false;
    }
    if (!this.validateParameterValue(name, value, definition)) {
      console.warn(`Validation failed for parameter ${name} = ${value}`);
      return false;
    }
    const currentValue = this.parameterValues.get(name);
    const isInitialSync = !this.initialValuesSynced;
    if (currentValue === value && !isInitialSync) {
      return false;
    }
    this.parameterValues.set(name, value);
    const parameterObject = this.parameterObjects.get(name);
    if (parameterObject) {
      parameterObject.value = value;
    }
    return true;
  }
  validateParameterValue(name, value, definition) {
    if (definition.validate && !definition.validate(value)) {
      console.error(`Custom validation failed for parameter '${name}': ${value}`);
      return false;
    }
    switch (definition.type) {
      case "slider":
      case "number":
        if (typeof value !== "number" || isNaN(value)) {
          console.error(`Parameter '${name}' must be a number, got: ${value}`);
          return false;
        }
        if (definition.config?.min !== void 0 && value < definition.config.min) {
          console.error(`Parameter '${name}' value ${value} is below minimum ${definition.config.min}`);
          return false;
        }
        if (definition.config?.max !== void 0 && value > definition.config.max) {
          console.error(`Parameter '${name}' value ${value} is above maximum ${definition.config.max}`);
          return false;
        }
        break;
      case "color":
        if (typeof value !== "string" || !/^#[0-9A-Fa-f]{6}$/.test(value)) {
          console.error(`Parameter '${name}' must be a valid hex color, got: ${value}`);
          return false;
        }
        break;
      case "toggle":
        if (typeof value !== "boolean") {
          console.error(`Parameter '${name}' must be a boolean, got: ${value}`);
          return false;
        }
        break;
      case "select":
        if (!definition.config?.options || !definition.config.options.includes(value)) {
          console.error(`Parameter '${name}' value ${value} is not in options: ${definition.config?.options}`);
          return false;
        }
        break;
      case "text":
        if (typeof value !== "string") {
          console.error(`Parameter '${name}' must be a string, got: ${value}`);
          return false;
        }
        if (definition.config?.maxLength && value.length > definition.config.maxLength) {
          console.error(`Parameter '${name}' text too long: ${value.length} > ${definition.config.maxLength}`);
          return false;
        }
        break;
      case "image":
        if (value !== null && !(value instanceof ImageBitmap) && !(value instanceof OffscreenCanvas)) {
          console.error(`Parameter '${name}' must be null, ImageBitmap, or OffscreenCanvas, got: ${value}`);
          return false;
        }
        break;
    }
    return true;
  }
  // Reset parameter state (called when loading new scene)
  resetParameterState() {
    this.parametersDefined = false;
    this.initialValuesSynced = false;
    this.parameterDefinitions.clear();
    this.parameterGroups.clear();
    this.parameterValues.clear();
    this.parameterObjects.clear();
  }
  // Send all parameters (from helper functions) to host
  sendAllParametersToHost() {
    if (this.parametersDefined || this.parameterDefinitions.size === 0) {
      return;
    }
    try {
      const groups = /* @__PURE__ */ new Map();
      for (const [paramName, paramDef] of this.parameterDefinitions) {
        const groupName = paramDef.group || "general";
        if (!groups.has(groupName)) {
          const category = paramDef.category || "general";
          groups.set(groupName, {
            groupName,
            category,
            parameters: {}
          });
        }
        const group = groups.get(groupName);
        group.parameters[paramName] = paramDef;
      }
      this.parametersDefined = true;
      this.postMessageCallback("parameters-defined", {
        groups: Array.from(groups.values())
      });
      this.debugLog(`All parameters sent to host: ${this.parameterDefinitions.size} parameters in ${groups.size} groups`);
    } catch (error) {
      this.postMessageCallback("parameter-validation-error", {
        message: `Failed to send parameters to host: ${error.message}`,
        code: "PARAMETER_SENDING_ERROR"
      });
    }
  }
  // Mark initial values as synced
  markInitialValuesSynced() {
    this.initialValuesSynced = true;
  }
  // Get parameter count for performance reporting
  getParameterCount() {
    return this.parameterDefinitions.size;
  }
  // Get all parameter objects (for P5 adapter to add .p5 properties)
  getAllParameterObjects() {
    return this.parameterObjects;
  }
}
class InteractionSystem {
  // Interaction enabled state
  isEnabled = true;
  // Mouse interaction state
  mouseState = {
    x: 0,
    y: 0,
    isInCanvas: false,
    isPressed: false,
    leftButton: false,
    rightButton: false,
    middleButton: false,
    velocity: { x: 0, y: 0 },
    deltaX: 0,
    deltaY: 0,
    wheelDelta: 0,
    wheelX: 0,
    wheelY: 0,
    wasPressed: false,
    wasReleased: false,
    wasMoved: false
  };
  // Keyboard interaction state
  keyboardState = {
    isPressed: (key) => this.keyboardState.activeKeys.has(key.toLowerCase()),
    wasPressed: (key) => this.keyboardState.pressedThisFrame.has(key.toLowerCase()),
    wasReleased: (key) => this.keyboardState.releasedThisFrame.has(key.toLowerCase()),
    activeKeys: /* @__PURE__ */ new Set(),
    pressedThisFrame: /* @__PURE__ */ new Set(),
    releasedThisFrame: /* @__PURE__ */ new Set(),
    lastKeyPressed: "",
    lastKeyReleased: "",
    shift: false,
    ctrl: false,
    alt: false,
    meta: false
  };
  // Touch interaction state
  touchState = {
    points: [],
    count: 0,
    started: [],
    moved: [],
    ended: [],
    primary: null,
    gestures: {
      isPinching: false,
      isRotating: false,
      isPanning: false,
      isTapping: false,
      pinchScale: 1,
      pinchDelta: 0,
      rotationAngle: 0,
      rotationDelta: 0,
      panDelta: { x: 0, y: 0 },
      tapCount: 0,
      lastTapTime: 0,
      tapPosition: null
    }
  };
  constructor() {
    this.handleMouseUpdate = this.handleMouseUpdate.bind(this);
    this.handleKeyboardUpdate = this.handleKeyboardUpdate.bind(this);
    this.handleTouchUpdate = this.handleTouchUpdate.bind(this);
    this.frameStart = this.frameStart.bind(this);
  }
  /**
   * Get the interaction APIs for inclusion in the viji object
   */
  getInteractionAPIs() {
    return {
      mouse: this.mouseState,
      keyboard: this.keyboardState,
      touches: this.touchState
    };
  }
  /**
   * Called at the start of each frame to reset frame-based events
   */
  frameStart() {
    this.mouseState.wasPressed = false;
    this.mouseState.wasReleased = false;
    this.mouseState.wasMoved = false;
    this.mouseState.wheelDelta = 0;
    this.mouseState.wheelX = 0;
    this.mouseState.wheelY = 0;
    this.keyboardState.pressedThisFrame.clear();
    this.keyboardState.releasedThisFrame.clear();
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    this.touchState.gestures.isTapping = false;
    this.touchState.gestures.pinchDelta = 0;
    this.touchState.gestures.rotationDelta = 0;
  }
  /**
   * Handle mouse update messages from the host
   */
  handleMouseUpdate(data) {
    if (!this.isEnabled) return;
    this.mouseState.x = data.x;
    this.mouseState.y = data.y;
    this.mouseState.isInCanvas = data.isInCanvas !== void 0 ? data.isInCanvas : true;
    this.mouseState.leftButton = (data.buttons & 1) !== 0;
    this.mouseState.rightButton = (data.buttons & 2) !== 0;
    this.mouseState.middleButton = (data.buttons & 4) !== 0;
    this.mouseState.isPressed = data.buttons > 0;
    this.mouseState.deltaX = data.deltaX || 0;
    this.mouseState.deltaY = data.deltaY || 0;
    this.mouseState.wheelDelta = data.wheelDeltaY || 0;
    this.mouseState.wheelX = data.wheelDeltaX || 0;
    this.mouseState.wheelY = data.wheelDeltaY || 0;
    this.mouseState.velocity.x = data.deltaX || 0;
    this.mouseState.velocity.y = data.deltaY || 0;
    this.mouseState.wasPressed = data.wasPressed || false;
    this.mouseState.wasReleased = data.wasReleased || false;
    this.mouseState.wasMoved = data.deltaX !== 0 || data.deltaY !== 0;
  }
  /**
   * Handle keyboard update messages from the host
   */
  handleKeyboardUpdate(data) {
    if (!this.isEnabled) return;
    const key = data.key.toLowerCase();
    if (data.type === "keydown") {
      if (!this.keyboardState.activeKeys.has(key)) {
        this.keyboardState.activeKeys.add(key);
        this.keyboardState.pressedThisFrame.add(key);
        this.keyboardState.lastKeyPressed = data.key;
      }
    } else if (data.type === "keyup") {
      this.keyboardState.activeKeys.delete(key);
      this.keyboardState.releasedThisFrame.add(key);
      this.keyboardState.lastKeyReleased = data.key;
    }
    this.keyboardState.shift = data.shiftKey;
    this.keyboardState.ctrl = data.ctrlKey;
    this.keyboardState.alt = data.altKey;
    this.keyboardState.meta = data.metaKey;
  }
  /**
   * Handle touch update messages from the host
   */
  handleTouchUpdate(data) {
    if (!this.isEnabled) return;
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    const touches = data.touches.map((touch) => ({
      id: touch.identifier,
      x: touch.clientX,
      y: touch.clientY,
      pressure: touch.pressure || 0,
      radius: Math.max(touch.radiusX || 0, touch.radiusY || 0),
      radiusX: touch.radiusX || 0,
      radiusY: touch.radiusY || 0,
      rotationAngle: touch.rotationAngle || 0,
      force: touch.force || touch.pressure || 0,
      deltaX: 0,
      // Could be calculated if we track previous positions
      deltaY: 0,
      velocity: { x: 0, y: 0 },
      // Could be calculated if we track movement
      isNew: data.type === "touchstart",
      isActive: true,
      isEnding: data.type === "touchend" || data.type === "touchcancel"
    }));
    this.touchState.points = touches;
    this.touchState.count = touches.length;
    this.touchState.primary = touches[0] || null;
    if (data.type === "touchstart") {
      this.touchState.started = touches;
    } else if (data.type === "touchmove") {
      this.touchState.moved = touches;
    } else if (data.type === "touchend" || data.type === "touchcancel") {
      this.touchState.ended = touches;
    }
    this.touchState.gestures = {
      isPinching: false,
      isRotating: false,
      isPanning: false,
      isTapping: false,
      pinchScale: 1,
      pinchDelta: 0,
      rotationAngle: 0,
      rotationDelta: 0,
      panDelta: { x: 0, y: 0 },
      tapCount: 0,
      lastTapTime: 0,
      tapPosition: null
    };
  }
  /**
   * Reset all interaction state (called when loading new scene)
   */
  resetInteractionState() {
    Object.assign(this.mouseState, {
      x: 0,
      y: 0,
      isInCanvas: false,
      isPressed: false,
      leftButton: false,
      rightButton: false,
      middleButton: false,
      velocity: { x: 0, y: 0 },
      deltaX: 0,
      deltaY: 0,
      wheelDelta: 0,
      wheelX: 0,
      wheelY: 0,
      wasPressed: false,
      wasReleased: false,
      wasMoved: false
    });
    this.keyboardState.activeKeys.clear();
    this.keyboardState.pressedThisFrame.clear();
    this.keyboardState.releasedThisFrame.clear();
    this.keyboardState.lastKeyPressed = "";
    this.keyboardState.lastKeyReleased = "";
    this.keyboardState.shift = false;
    this.keyboardState.ctrl = false;
    this.keyboardState.alt = false;
    this.keyboardState.meta = false;
    this.touchState.points = [];
    this.touchState.count = 0;
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    this.touchState.primary = null;
    Object.assign(this.touchState.gestures, {
      isPinching: false,
      isRotating: false,
      isPanning: false,
      isTapping: false,
      pinchScale: 1,
      pinchDelta: 0,
      rotationAngle: 0,
      rotationDelta: 0,
      panDelta: { x: 0, y: 0 },
      tapCount: 0,
      lastTapTime: 0,
      tapPosition: null
    });
  }
  /**
   * Enable or disable interaction processing
   */
  setInteractionEnabled(enabled) {
    this.isEnabled = enabled;
    if (!enabled) {
      this.resetInteractionStates();
    }
  }
  /**
   * Get current interaction enabled state
   */
  getInteractionEnabled() {
    return this.isEnabled;
  }
  /**
   * Reset all interaction states to default values
   */
  resetInteractionStates() {
    this.mouseState.x = 0;
    this.mouseState.y = 0;
    this.mouseState.isInCanvas = false;
    this.mouseState.isPressed = false;
    this.mouseState.leftButton = false;
    this.mouseState.rightButton = false;
    this.mouseState.middleButton = false;
    this.mouseState.velocity.x = 0;
    this.mouseState.velocity.y = 0;
    this.mouseState.deltaX = 0;
    this.mouseState.deltaY = 0;
    this.mouseState.wheelDelta = 0;
    this.mouseState.wheelX = 0;
    this.mouseState.wheelY = 0;
    this.mouseState.wasPressed = false;
    this.mouseState.wasReleased = false;
    this.mouseState.wasMoved = false;
    this.keyboardState.activeKeys.clear();
    this.keyboardState.pressedThisFrame.clear();
    this.keyboardState.releasedThisFrame.clear();
    this.keyboardState.lastKeyPressed = "";
    this.keyboardState.lastKeyReleased = "";
    this.keyboardState.shift = false;
    this.keyboardState.ctrl = false;
    this.keyboardState.alt = false;
    this.keyboardState.meta = false;
    this.touchState.points = [];
    this.touchState.count = 0;
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    this.touchState.primary = null;
    this.touchState.gestures.isPinching = false;
    this.touchState.gestures.isRotating = false;
    this.touchState.gestures.isPanning = false;
    this.touchState.gestures.isTapping = false;
    this.touchState.gestures.pinchScale = 1;
    this.touchState.gestures.pinchDelta = 0;
    this.touchState.gestures.rotationAngle = 0;
    this.touchState.gestures.rotationDelta = 0;
    this.touchState.gestures.panDelta = { x: 0, y: 0 };
    this.touchState.gestures.tapCount = 0;
    this.touchState.gestures.lastTapTime = 0;
    this.touchState.gestures.tapPosition = null;
  }
}
class CVSystem {
  // MediaPipe Tasks Vision worker
  cvWorker = null;
  workerRestartCount = 0;
  maxWorkerRestarts = 3;
  workerLastRestart = 0;
  workerRestartCooldown = 5e3;
  // 5 seconds
  // Feature activation state
  activeFeatures = /* @__PURE__ */ new Set();
  pendingFeatures = /* @__PURE__ */ new Set();
  // Features to restore after restart
  // CV Results cache (for non-blocking processing)
  results = {
    faces: [],
    hands: [],
    pose: null,
    segmentation: null
  };
  // Processing state and performance tracking
  processing = false;
  cvFrameCounter = 0;
  cvFrameRateMode = "quarter";
  // Default: 1/4 scene rate
  sceneTargetFPS = 60;
  // Will be updated from scene processing rate
  processingStartTime = 0;
  processingTimes = [];
  // CV Frame Rate Tracking (similar to main core)
  cvFrameTimes = [];
  lastCVFrameTime = 0;
  actualCVFPS = 0;
  debugMode = false;
  // Debug mode disabled for production
  constructor() {
    this.debugLog("🔧 CVSystem initialized for MediaPipe Tasks Vision");
  }
  debugLog(...args) {
    if (this.debugMode) {
      console.log("🔧 [CVSystem]", ...args);
    }
  }
  setDebugMode(enabled) {
    this.debugMode = enabled;
    this.debugLog(`Debug mode ${enabled ? "enabled" : "disabled"}`);
  }
  /**
   * Update CV frame rate configuration (called from worker)
   */
  updateCVFrameRate(mode, sceneTargetFPS) {
    this.cvFrameRateMode = mode;
    this.sceneTargetFPS = sceneTargetFPS;
    this.debugLog(`CV frame rate updated: mode=${mode}, sceneTargetFPS=${sceneTargetFPS}`);
  }
  /**
   * Initialize MediaPipe Tasks Vision worker
   */
  async ensureCVWorker() {
    if (this.cvWorker) return;
    try {
      this.debugLog("🔧 Creating MediaPipe Tasks Vision worker...");
      const workerUrl = "/dist/assets/cv-tasks.worker.js";
      this.cvWorker = new Worker(workerUrl);
      this.cvWorker.addEventListener("message", (evt) => {
        const msg = evt.data;
        this.debugLog(`📨 [CV Worker -> CVSystem] ${msg.type}`, msg.success ? msg.data : msg.error);
      });
      this.cvWorker.onerror = (err) => {
        this.debugLog("❌ CV worker error event", err.message);
        this.handleWorkerFailure(`Worker error: ${err.message}`);
      };
      this.cvWorker.onmessageerror = (err) => {
        this.debugLog("❌ CV worker message error", err);
        this.handleWorkerFailure("Worker message error");
      };
      await this.postToCV("init", {});
      this.debugLog("✅ CV worker initialized");
    } catch (error) {
      this.debugLog("❌ Failed to initialize CV worker:", error);
      throw error;
    }
  }
  /**
   * Handle worker failure and attempt restart
   */
  async handleWorkerFailure(reason) {
    this.debugLog(`⚠️ CV Worker failure: ${reason}`);
    if (this.cvWorker) {
      this.cvWorker.terminate();
      this.cvWorker = null;
    }
    const now = Date.now();
    if (this.workerRestartCount >= this.maxWorkerRestarts) {
      this.debugLog("❌ Max worker restarts exceeded, giving up");
      return;
    }
    if (now - this.workerLastRestart < this.workerRestartCooldown) {
      this.debugLog("⏱️ Worker restart cooldown active, skipping restart");
      return;
    }
    this.pendingFeatures = new Set(this.activeFeatures);
    this.activeFeatures.clear();
    try {
      this.workerRestartCount++;
      this.workerLastRestart = now;
      this.debugLog(`🔄 Restarting CV worker (attempt ${this.workerRestartCount}/${this.maxWorkerRestarts})`);
      await this.ensureCVWorker();
      if (this.pendingFeatures.size > 0) {
        const featuresToRestore = Array.from(this.pendingFeatures);
        this.debugLog(`🔄 Restoring features: [${featuresToRestore.join(", ")}]`);
        try {
          await this.postToCV("config", { features: featuresToRestore });
          this.activeFeatures = new Set(this.pendingFeatures);
          this.debugLog("✅ Features restored successfully");
        } catch (error) {
          this.debugLog("❌ Failed to restore features:", error);
        }
        this.pendingFeatures.clear();
      }
      this.debugLog("✅ CV worker restarted successfully");
    } catch (error) {
      this.debugLog("❌ Failed to restart CV worker:", error);
    }
  }
  /**
   * Send message to CV worker and wait for response
   */
  postToCV(type, data, transfer) {
    return new Promise((resolve, reject) => {
      if (!this.cvWorker) {
        return reject(new Error("CV worker not initialized"));
      }
      const timeout = setTimeout(() => {
        this.debugLog(`⏱️ [CV Worker] timeout for ${type}`);
        if (type === "config") {
          this.handleWorkerFailure(`Timeout for ${type} message`);
        }
        reject(new Error(`CV worker timeout for ${type}`));
      }, 5e3);
      const onMessage = (ev) => {
        const msg = ev.data;
        if (msg.type === "result") {
          clearTimeout(timeout);
          this.cvWorker.removeEventListener("message", onMessage);
          if (msg.success) {
            this.debugLog(`✅ [CV Worker] response for ${type}`, msg.data);
            resolve(msg.data);
          } else {
            this.debugLog(`❌ [CV Worker] error response for ${type}`, msg.error);
            if (msg.restartRequired) {
              this.handleWorkerFailure(`Worker reported restart required: ${msg.error}`);
            }
            reject(new Error(msg.error || "CV worker error"));
          }
        }
      };
      this.cvWorker.addEventListener("message", onMessage);
      const message = {
        type,
        ...data
      };
      this.debugLog(`📤 [CVSystem -> CV Worker] ${type}`, data);
      this.cvWorker.postMessage(message, transfer || []);
    });
  }
  /**
   * Enable face detection feature (bounding boxes only)
   */
  async enableFaceDetection() {
    if (this.activeFeatures.has("faceDetection")) return;
    try {
      this.debugLog("🔧 Enabling face detection...");
      if (this.activeFeatures.size >= 3 && !this.checkWebGLContextAvailability()) {
        this.debugLog("⚠️ Warning: WebGL contexts may be running low. Consider disabling unused CV features.");
      }
      await this.ensureCVWorker();
      const newFeatures = Array.from(this.activeFeatures).concat(["faceDetection"]);
      await this.postToCV("config", { features: newFeatures });
      this.activeFeatures.add("faceDetection");
      this.debugLog("✅ Face detection enabled");
    } catch (error) {
      this.debugLog("❌ Failed to enable face detection:", error);
      throw error;
    }
  }
  /**
   * Disable face detection and cleanup
   */
  async disableFaceDetection() {
    if (!this.activeFeatures.has("faceDetection")) return;
    try {
      this.debugLog("🔧 Disabling face detection...");
      this.activeFeatures.delete("faceDetection");
      if (!this.activeFeatures.has("faceMesh")) {
        this.results.faces = [];
      }
      const newFeatures = Array.from(this.activeFeatures);
      await this.postToCV("config", { features: newFeatures });
      this.debugLog("✅ Face detection disabled and cleaned up");
    } catch (error) {
      this.debugLog("❌ Failed to disable face detection:", error);
      throw error;
    }
  }
  /**
   * Enable face mesh feature (468-point facial landmarks)
   */
  async enableFaceMesh() {
    if (this.activeFeatures.has("faceMesh")) return;
    try {
      this.debugLog("🔧 Enabling face mesh...");
      if (this.activeFeatures.size >= 3 && !this.checkWebGLContextAvailability()) {
        this.debugLog("⚠️ Warning: WebGL contexts may be running low. Consider disabling unused CV features.");
      }
      await this.ensureCVWorker();
      const newFeatures = Array.from(this.activeFeatures).concat(["faceMesh"]);
      await this.postToCV("config", { features: newFeatures });
      this.activeFeatures.add("faceMesh");
      this.debugLog("✅ Face mesh enabled");
    } catch (error) {
      this.debugLog("❌ Failed to enable face mesh:", error);
      throw error;
    }
  }
  /**
   * Disable face mesh and cleanup
   */
  async disableFaceMesh() {
    if (!this.activeFeatures.has("faceMesh")) return;
    try {
      this.debugLog("🔧 Disabling face mesh...");
      this.activeFeatures.delete("faceMesh");
      if (!this.activeFeatures.has("faceDetection")) {
        this.results.faces = [];
      }
      const newFeatures = Array.from(this.activeFeatures);
      await this.postToCV("config", { features: newFeatures });
      this.debugLog("✅ Face mesh disabled and cleaned up");
    } catch (error) {
      this.debugLog("❌ Failed to disable face mesh:", error);
      throw error;
    }
  }
  /**
   * Enable hand tracking feature
   */
  async enableHandTracking() {
    if (this.activeFeatures.has("handTracking")) return;
    try {
      this.debugLog("🔧 Enabling hand tracking...");
      await this.ensureCVWorker();
      const newFeatures = Array.from(this.activeFeatures).concat(["handTracking"]);
      await this.postToCV("config", { features: newFeatures });
      this.activeFeatures.add("handTracking");
      this.debugLog("✅ Hand tracking enabled");
    } catch (error) {
      this.debugLog("❌ Failed to enable hand tracking:", error);
      throw error;
    }
  }
  /**
   * Disable hand tracking and cleanup
   */
  async disableHandTracking() {
    if (!this.activeFeatures.has("handTracking")) return;
    try {
      this.debugLog("🔧 Disabling hand tracking...");
      this.activeFeatures.delete("handTracking");
      this.results.hands = [];
      const newFeatures = Array.from(this.activeFeatures);
      await this.postToCV("config", { features: newFeatures });
      this.debugLog("✅ Hand tracking disabled and cleaned up");
    } catch (error) {
      this.debugLog("❌ Failed to disable hand tracking:", error);
      throw error;
    }
  }
  /**
   * Enable pose detection feature
   */
  async enablePoseDetection() {
    if (this.activeFeatures.has("poseDetection")) return;
    try {
      this.debugLog("🔧 Enabling pose detection...");
      await this.ensureCVWorker();
      const newFeatures = Array.from(this.activeFeatures).concat(["poseDetection"]);
      await this.postToCV("config", { features: newFeatures });
      this.activeFeatures.add("poseDetection");
      this.debugLog("✅ Pose detection enabled");
    } catch (error) {
      this.debugLog("❌ Failed to enable pose detection:", error);
      throw error;
    }
  }
  /**
   * Disable pose detection and cleanup
   */
  async disablePoseDetection() {
    if (!this.activeFeatures.has("poseDetection")) return;
    try {
      this.debugLog("🔧 Disabling pose detection...");
      this.activeFeatures.delete("poseDetection");
      this.results.pose = null;
      const newFeatures = Array.from(this.activeFeatures);
      await this.postToCV("config", { features: newFeatures });
      this.debugLog("✅ Pose detection disabled and cleaned up");
    } catch (error) {
      this.debugLog("❌ Failed to disable pose detection:", error);
      throw error;
    }
  }
  /**
   * Enable body segmentation feature
   */
  async enableBodySegmentation() {
    if (this.activeFeatures.has("bodySegmentation")) return;
    try {
      this.debugLog("🔧 Enabling body segmentation...");
      await this.ensureCVWorker();
      const newFeatures = Array.from(this.activeFeatures).concat(["bodySegmentation"]);
      await this.postToCV("config", { features: newFeatures });
      this.activeFeatures.add("bodySegmentation");
      this.debugLog("✅ Body segmentation enabled");
    } catch (error) {
      this.debugLog("❌ Failed to enable body segmentation:", error);
      throw error;
    }
  }
  /**
   * Disable body segmentation and cleanup
   */
  async disableBodySegmentation() {
    if (!this.activeFeatures.has("bodySegmentation")) return;
    try {
      this.debugLog("🔧 Disabling body segmentation...");
      this.activeFeatures.delete("bodySegmentation");
      this.results.segmentation = null;
      const newFeatures = Array.from(this.activeFeatures);
      await this.postToCV("config", { features: newFeatures });
      this.debugLog("✅ Body segmentation disabled and cleaned up");
    } catch (error) {
      this.debugLog("❌ Failed to disable body segmentation:", error);
      throw error;
    }
  }
  /**
   * Process video frame with active CV features
   */
  async processFrame(bitmap) {
    if (this.processing || this.activeFeatures.size === 0) {
      return;
    }
    this.cvFrameCounter++;
    const shouldProcess = this.shouldProcessFrame();
    if (!shouldProcess) {
      return;
    }
    this.processing = true;
    this.processingStartTime = performance.now();
    this.trackCVFrameRate();
    this.debugLog(`🎬 Processing frame ${this.cvFrameCounter} with features:`, Array.from(this.activeFeatures));
    try {
      const features = Array.from(this.activeFeatures);
      const timestamp = performance.now();
      const processPromise = this.postToCV("process", {
        bitmap,
        timestamp,
        features
      }, [bitmap]);
      const timeoutPromise = new Promise((_, reject) => {
        setTimeout(() => reject(new Error("CV processing timeout")), 500);
      });
      const results = await Promise.race([processPromise, timeoutPromise]);
      if (results.faces && (this.activeFeatures.has("faceDetection") || this.activeFeatures.has("faceMesh"))) {
        this.results.faces = results.faces;
        this.debugLog(`📥 Received ${results.faces.length} face results`);
      }
      if (results.hands && this.activeFeatures.has("handTracking")) {
        this.results.hands = results.hands;
        this.debugLog(`📥 Received ${results.hands.length} hand results`);
      }
      if (results.pose && this.activeFeatures.has("poseDetection")) {
        this.results.pose = results.pose;
        this.debugLog(`📥 Received pose results with ${results.pose.landmarks.length} landmarks`);
      }
      if (results.segmentation && this.activeFeatures.has("bodySegmentation")) {
        this.results.segmentation = results.segmentation;
        this.debugLog(`📥 Received segmentation results ${results.segmentation.width}x${results.segmentation.height}`);
      }
      const processingTime = performance.now() - this.processingStartTime;
      this.processingTimes.push(processingTime);
      if (this.processingTimes.length > 30) {
        this.processingTimes.shift();
      }
    } catch (error) {
      this.debugLog("⚠️ CV processing failed:", error);
    } finally {
      this.processing = false;
    }
  }
  /**
   * Check if current frame should be processed based on CV frame rate mode
   */
  shouldProcessFrame() {
    const divisor = this.getFrameRateDivisor();
    return this.cvFrameCounter % divisor === 0;
  }
  /**
   * Track CV processing frame rate (similar to main core)
   */
  trackCVFrameRate() {
    const now = performance.now();
    if (this.lastCVFrameTime > 0) {
      const deltaTime = now - this.lastCVFrameTime;
      this.cvFrameTimes.push(deltaTime);
      if (this.cvFrameTimes.length > 30) {
        this.cvFrameTimes.shift();
      }
      if (this.cvFrameTimes.length > 0) {
        const avgDeltaTime = this.cvFrameTimes.reduce((a, b) => a + b, 0) / this.cvFrameTimes.length;
        this.actualCVFPS = Math.round(1e3 / avgDeltaTime);
      }
    }
    this.lastCVFrameTime = now;
  }
  /**
   * Get frame rate divisor based on current mode
   */
  getFrameRateDivisor() {
    switch (this.cvFrameRateMode) {
      case "full":
        return 1;
      case "half":
        return 2;
      case "quarter":
        return 4;
      case "eighth":
        return 8;
      default:
        return 4;
    }
  }
  /**
   * Get current CV results
   */
  getResults() {
    return { ...this.results };
  }
  /**
   * Get processing statistics
   */
  getStats() {
    const avgProcessingTime = this.processingTimes.length > 0 ? this.processingTimes.reduce((a, b) => a + b, 0) / this.processingTimes.length : 0;
    const targetFPS = this.sceneTargetFPS / this.getFrameRateDivisor();
    return {
      activeFeatures: Array.from(this.activeFeatures),
      processingTime: avgProcessingTime,
      effectiveFPS: targetFPS,
      actualFPS: this.actualCVFPS,
      // Add actual measured CV FPS
      isProcessing: this.processing
    };
  }
  /**
   * Check WebGL context availability before enabling features
   */
  checkWebGLContextAvailability() {
    try {
      const canvas = new OffscreenCanvas(1, 1);
      const gl = canvas.getContext("webgl");
      if (!gl) {
        this.debugLog("⚠️ WebGL contexts may be exhausted");
        return false;
      }
      const ext = gl.getExtension("WEBGL_lose_context");
      if (ext) ext.loseContext();
      return true;
    } catch (error) {
      this.debugLog("⚠️ WebGL context check failed:", error);
      return false;
    }
  }
  /**
   * Get CV control interface for artist API
   */
  getControlInterface() {
    return {
      enableFaceDetection: (enabled) => {
        if (enabled === false) {
          return this.disableFaceDetection();
        } else {
          return this.enableFaceDetection();
        }
      },
      disableFaceDetection: () => this.disableFaceDetection(),
      enableFaceMesh: (enabled) => {
        if (enabled === false) {
          return this.disableFaceMesh();
        } else {
          return this.enableFaceMesh();
        }
      },
      disableFaceMesh: () => this.disableFaceMesh(),
      enableHandTracking: (enabled) => {
        if (enabled === false) {
          return this.disableHandTracking();
        } else {
          return this.enableHandTracking();
        }
      },
      disableHandTracking: () => this.disableHandTracking(),
      enablePoseDetection: (enabled) => {
        if (enabled === false) {
          return this.disablePoseDetection();
        } else {
          return this.enablePoseDetection();
        }
      },
      disablePoseDetection: () => this.disablePoseDetection(),
      enableBodySegmentation: (enabled) => {
        if (enabled === false) {
          return this.disableBodySegmentation();
        } else {
          return this.enableBodySegmentation();
        }
      },
      disableBodySegmentation: () => this.disableBodySegmentation(),
      getActiveFeatures: () => Array.from(this.activeFeatures),
      isProcessing: () => this.processing,
      getStats: () => this.getStats(),
      getWorkerStatus: () => ({
        healthy: !!this.cvWorker,
        restartCount: this.workerRestartCount,
        maxRestarts: this.maxWorkerRestarts
      }),
      restartWorker: () => this.handleWorkerFailure("Manual restart requested"),
      // WebGL context monitoring
      checkWebGLAvailability: () => this.checkWebGLContextAvailability(),
      getResourceUsage: () => ({
        activeFeatures: this.activeFeatures.size,
        estimatedWebGLContexts: this.activeFeatures.size * 2,
        // ~2 contexts per feature
        webglAvailable: this.checkWebGLContextAvailability()
      })
    };
  }
  /**
   * Cleanup all CV resources
   */
  async cleanup() {
    this.debugLog("🔧 Cleaning up CVSystem...");
    for (const feature of Array.from(this.activeFeatures)) {
      switch (feature) {
        case "faceDetection":
          await this.disableFaceDetection();
          break;
        case "handTracking":
          await this.disableHandTracking();
          break;
        case "poseDetection":
          await this.disablePoseDetection();
          break;
        case "bodySegmentation":
          await this.disableBodySegmentation();
          break;
      }
    }
    if (this.cvWorker) {
      this.cvWorker.terminate();
      this.cvWorker = null;
    }
    this.activeFeatures.clear();
    this.pendingFeatures.clear();
    this.workerRestartCount = 0;
    this.results.faces = [];
    this.results.hands = [];
    this.results.pose = null;
    this.results.segmentation = null;
    this.processing = false;
    this.processingTimes = [];
    this.debugLog("✅ CVSystem cleanup complete");
  }
}
class VideoSystem {
  // ✅ CORRECT: Worker-owned OffscreenCanvas (transferred from host)
  offscreenCanvas = null;
  ctx = null;
  gl = null;
  // CV processing helpers
  cvScratchCanvas = null;
  cvScratchContext = null;
  // Debug logging control
  debugMode = false;
  /**
   * Enable or disable debug logging
   */
  setDebugMode(enabled) {
    this.debugMode = enabled;
    if (this.cvSystem) {
      this.cvSystem.setDebugMode(enabled);
    }
  }
  /**
   * Debug logging helper
   */
  debugLog(message, ...args) {
    if (this.debugMode) {
      console.log(message, ...args);
    }
  }
  // Frame processing configuration
  targetFrameRate = 30;
  // Default target FPS for video processing
  lastFrameTime = 0;
  frameInterval = 1e3 / this.targetFrameRate;
  // ms between frames
  // Processing state
  hasLoggedFirstFrame = false;
  frameCount = 0;
  // Video state for artist API
  videoState = {
    isConnected: false,
    currentFrame: null,
    frameWidth: 0,
    frameHeight: 0,
    frameRate: 0,
    frameData: null
  };
  // Phase 11 - CV System Integration
  cvSystem;
  constructor() {
    this.cvSystem = new CVSystem();
  }
  /**
   * Get the video API for inclusion in the viji object
   */
  getVideoAPI() {
    const cvResults = this.cvSystem.getResults();
    return {
      isConnected: this.videoState.isConnected,
      currentFrame: this.videoState.currentFrame,
      frameWidth: this.videoState.frameWidth,
      frameHeight: this.videoState.frameHeight,
      frameRate: this.videoState.frameRate,
      getFrameData: () => this.videoState.frameData,
      // CV Results from CVSystem
      faces: cvResults.faces,
      hands: cvResults.hands,
      pose: cvResults.pose,
      segmentation: cvResults.segmentation,
      // CV Control Interface
      cv: this.cvSystem.getControlInterface()
    };
  }
  /**
   * ✅ CORRECT: Receive OffscreenCanvas transfer from host
   */
  handleCanvasSetup(data) {
    try {
      this.disconnectVideo();
      this.offscreenCanvas = data.offscreenCanvas;
      this.ctx = this.offscreenCanvas.getContext("2d", {
        willReadFrequently: true
        // Optimize for frequent getImageData calls
      });
      if (!this.ctx) {
        throw new Error("Failed to get 2D context from transferred OffscreenCanvas");
      }
      try {
        this.gl = this.offscreenCanvas.getContext("webgl2") || this.offscreenCanvas.getContext("webgl");
      } catch (e) {
        this.debugLog("WebGL not available, using 2D context only");
      }
      this.videoState.isConnected = true;
      this.videoState.currentFrame = this.offscreenCanvas;
      this.videoState.frameWidth = data.width;
      this.videoState.frameHeight = data.height;
      this.frameCount = 0;
      this.hasLoggedFirstFrame = false;
      this.debugLog("✅ OffscreenCanvas received and setup completed (worker-side)", {
        width: data.width,
        height: data.height,
        hasWebGL: !!this.gl,
        targetFrameRate: this.targetFrameRate
      });
      this.debugLog("🎬 CORRECT OffscreenCanvas approach - Worker has full GPU access!");
    } catch (error) {
      console.error("Failed to setup OffscreenCanvas in worker:", error);
      this.disconnectVideo();
    }
  }
  /**
   * ✅ CORRECT: Receive ImageBitmap frame and draw to worker's OffscreenCanvas
   */
  handleFrameUpdate(data) {
    if (!this.offscreenCanvas || !this.ctx) {
      console.warn("🔴 Received frame but OffscreenCanvas not setup");
      return;
    }
    try {
      if (this.frameCount % 150 === 0) {
        this.debugLog("✅ Worker received ImageBitmap frame:", {
          bitmapSize: `${data.imageBitmap.width}x${data.imageBitmap.height}`,
          canvasSize: `${this.offscreenCanvas.width}x${this.offscreenCanvas.height}`,
          frameCount: this.frameCount,
          timestamp: data.timestamp
        });
      }
      this.ctx.drawImage(data.imageBitmap, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
      this.processCurrentFrame(data.timestamp);
      data.imageBitmap.close();
      this.frameCount++;
    } catch (error) {
      console.error("🔴 Error processing video frame (worker-side):", error);
    }
  }
  /**
   * Process current frame (called when new frame is drawn)
   */
  async processCurrentFrame(timestamp) {
    if (!this.offscreenCanvas || !this.ctx) {
      return;
    }
    try {
      this.videoState.frameData = this.ctx.getImageData(
        0,
        0,
        this.offscreenCanvas.width,
        this.offscreenCanvas.height
      );
      const deltaTime = timestamp - this.lastFrameTime;
      this.videoState.frameRate = deltaTime > 0 ? 1e3 / deltaTime : 0;
      if (!this.hasLoggedFirstFrame) {
        this.debugLog(`🎯 Worker-side OffscreenCanvas processing active: ${this.videoState.frameRate.toFixed(1)} FPS (${this.offscreenCanvas.width}x${this.offscreenCanvas.height})`);
        this.debugLog("✅ Full GPU access available for custom effects and CV analysis");
        this.hasLoggedFirstFrame = true;
      }
      if (this.videoState.frameData) {
        try {
          const bitmap = await createImageBitmap(this.videoState.frameData);
          this.cvSystem.processFrame(bitmap);
        } catch (bitmapError) {
          this.debugLog("⚠️ createImageBitmap failed – falling back to reusable CV canvas:", bitmapError);
          if (!this.cvScratchCanvas || !this.cvScratchContext || this.cvScratchCanvas.width !== this.videoState.frameData.width || this.cvScratchCanvas.height !== this.videoState.frameData.height) {
            this.cvScratchCanvas = new OffscreenCanvas(
              this.videoState.frameData.width,
              this.videoState.frameData.height
            );
            this.cvScratchContext = this.cvScratchCanvas.getContext("2d");
            if (!this.cvScratchContext) {
              throw new Error("Failed to get 2D context for CV fallback canvas");
            }
          }
          this.cvScratchContext.putImageData(this.videoState.frameData, 0, 0);
          const fallbackBitmap = this.cvScratchCanvas.transferToImageBitmap();
          this.cvSystem.processFrame(fallbackBitmap);
        }
      }
      this.lastFrameTime = timestamp;
    } catch (error) {
      console.error("Error processing video frame (worker-side):", error);
    }
  }
  /**
   * Handle video configuration updates (including disconnection and resize)
   */
  handleVideoConfigUpdate(data) {
    try {
      if (data.disconnect) {
        this.disconnectVideo();
        return;
      }
      if (data.width && data.height && this.offscreenCanvas) {
        this.resizeCanvas(data.width, data.height);
      }
      if (data.targetFrameRate) {
        this.updateProcessingConfig(data.targetFrameRate);
      }
      if (data.cvFrameRate) {
        this.updateCVFrameRate(data.cvFrameRate);
      }
    } catch (error) {
      console.error("Error handling video config update:", error);
    }
  }
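  // NOTE (editorial sketch): the config object handled above is a partial update; judging from
  // the branches, any of these shapes should be accepted (values are illustrative only):
  //
  //   { disconnect: true, timestamp: performance.now() }
  //   { width: 1280, height: 720, timestamp: performance.now() }
  //   { targetFrameRate: 30, timestamp: performance.now() }
  //   { cvFrameRate: { mode: "half", sceneTargetFPS: 60 }, timestamp: performance.now() }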
  /**
   * Resize the OffscreenCanvas (when video dimensions change)
   */
  resizeCanvas(width, height) {
    if (!this.offscreenCanvas) return;
    try {
      this.offscreenCanvas.width = width;
      this.offscreenCanvas.height = height;
      this.videoState.frameWidth = width;
      this.videoState.frameHeight = height;
      if (this.gl) {
        this.gl.viewport(0, 0, width, height);
      }
      this.debugLog(`📐 OffscreenCanvas resized to ${width}x${height} (worker-side)`);
    } catch (error) {
      console.error("Error resizing OffscreenCanvas:", error);
    }
  }
  /**
   * Disconnect video and clean up resources
   */
  disconnectVideo() {
    if (this.offscreenCanvas && this.ctx) {
      this.ctx.clearRect(0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
      this.debugLog("🧹 Cleared OffscreenCanvas on disconnect");
    }
    this.offscreenCanvas = null;
    this.ctx = null;
    this.gl = null;
    this.videoState.isConnected = false;
    this.videoState.currentFrame = null;
    this.videoState.frameWidth = 0;
    this.videoState.frameHeight = 0;
    this.videoState.frameRate = 0;
    this.videoState.frameData = null;
    this.cvScratchCanvas = null;
    this.cvScratchContext = null;
    this.hasLoggedFirstFrame = false;
    this.frameCount = 0;
    this.debugLog("Video disconnected (worker-side)");
  }
  /**
   * Update video processing configuration
   */
  updateProcessingConfig(targetFrameRate) {
    this.targetFrameRate = Math.max(1, Math.min(60, targetFrameRate));
    this.frameInterval = 1e3 / this.targetFrameRate;
    this.debugLog(`Video processing frame rate updated to ${this.targetFrameRate} FPS (worker-side)`);
  }
  /**
   * Phase 11 - Update CV frame rate configuration
   */
  updateCVFrameRate(cvFrameRate) {
    this.cvSystem.updateCVFrameRate(cvFrameRate.mode, cvFrameRate.sceneTargetFPS);
    this.debugLog(`CV frame rate updated to ${cvFrameRate.mode} of ${cvFrameRate.sceneTargetFPS} FPS (worker-side)`);
  }
  /**
   * Reset all video state (called when loading new scene)
   */
  resetVideoState() {
    this.disconnectVideo();
    if (this.cvSystem) {
      this.cvSystem.cleanup();
    }
  }
  /**
   * Get current processing configuration
   */
  getProcessingConfig() {
    return {
      targetFrameRate: this.targetFrameRate,
      frameInterval: this.frameInterval,
      frameCount: this.frameCount
    };
  }
  /**
   * Get WebGL context for advanced effects (if available)
   */
  getWebGLContext() {
    return this.gl;
  }
  /**
   * ✅ WORKER API: Artists can access the OffscreenCanvas directly for custom effects
   */
  getCanvasForArtistEffects() {
    return this.offscreenCanvas;
  }
  /**
   * Get CV processing statistics
   */
  getCVStats() {
    return this.cvSystem.getStats();
  }
}
class VijiWorkerRuntime {
  canvas = null;
  ctx = null;
  gl = null;
  isRunning = false;
  frameCount = 0;
  lastTime = 0;
  startTime = 0;
  frameRateMode = "full";
  skipNextFrame = false;
  screenRefreshRate = 60;
  // Will be detected
  // Debug logging control
  debugMode = false;
  // P5.js adapter for P5 mode
  p5Adapter = null;
  rendererType = "native";
  /**
   * Enable or disable debug logging
   */
  setDebugMode(enabled) {
    this.debugMode = enabled;
    if (this.videoSystem) this.videoSystem.setDebugMode(enabled);
    if (this.parameterSystem && "setDebugMode" in this.parameterSystem) {
      this.parameterSystem.setDebugMode(enabled);
    }
    if (this.interactionSystem && "setDebugMode" in this.interactionSystem) {
      this.interactionSystem.setDebugMode(enabled);
    }
  }
  /**
   * Debug logging helper
   */
  debugLog(message, ...args) {
    if (this.debugMode) {
      console.log(message, ...args);
    }
  }
  // Effective refresh rate tracking
  effectiveFrameTimes = [];
  lastEffectiveRateReport = 0;
  effectiveRateReportInterval = 1e3;
  // Report every 1 second
  // Parameter system
  parameterSystem;
  // Interaction system (Phase 7)
  interactionSystem;
  // Video system (Phase 10) - worker-side video processing
  videoSystem;
  // Audio state (Phase 5) - receives analysis results from host
  audioState = {
    isConnected: false,
    volume: { rms: 0, peak: 0 },
    bands: {
      bass: 0,
      mid: 0,
      treble: 0,
      subBass: 0,
      lowMid: 0,
      highMid: 0,
      presence: 0,
      brilliance: 0
    },
    frequencyData: new Uint8Array(0)
  };
  // Video state is now managed by the worker-side VideoSystem
  // Artist API object
  viji = {
    // Canvas (will be set during init)
    canvas: null,
    ctx: null,
    gl: null,
    width: 0,
    height: 0,
    pixelRatio: 1,
    // Timing
    time: 0,
    deltaTime: 0,
    frameCount: 0,
    fps: 60,
    // Audio API (Phase 5) - will be set in constructor
    audio: {},
    video: {
      isConnected: false,
      currentFrame: null,
      frameWidth: 0,
      frameHeight: 0,
      frameRate: 0,
      getFrameData: () => null,
      faces: [],
      hands: []
    },
    // Interaction APIs will be added during construction
    mouse: {},
    keyboard: {},
    touches: {},
    // Parameter helper functions (return parameter objects) - delegate to parameter system
    slider: (defaultValue, config) => {
      return this.parameterSystem.createSliderParameter(defaultValue, config);
    },
    color: (defaultValue, config) => {
      return this.parameterSystem.createColorParameter(defaultValue, config);
    },
    toggle: (defaultValue, config) => {
      return this.parameterSystem.createToggleParameter(defaultValue, config);
    },
    select: (defaultValue, config) => {
      return this.parameterSystem.createSelectParameter(defaultValue, config);
    },
    text: (defaultValue, config) => {
      return this.parameterSystem.createTextParameter(defaultValue, config);
    },
    number: (defaultValue, config) => {
      return this.parameterSystem.createNumberParameter(defaultValue, config);
    },
    image: (defaultValue, config) => {
      return this.parameterSystem.createImageParameter(defaultValue, config);
    },
    // Context selection
    useContext: (type) => {
      if (type === "2d") {
        if (!this.ctx && this.canvas) {
          this.ctx = this.canvas.getContext("2d");
          this.viji.ctx = this.ctx;
        }
        return this.ctx;
      } else if (type === "webgl") {
        if (!this.gl && this.canvas) {
          this.gl = this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
          this.viji.gl = this.gl;
          if (this.gl) {
            this.gl.viewport(0, 0, this.viji.width, this.viji.height);
          }
        }
        return this.gl;
      }
      return null;
    }
  };
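  // NOTE (editorial sketch): from the artist's point of view, the object above is the whole API
  // surface handed to scene code. A minimal native scene might use it roughly like this
  // (illustrative only; the exact config fields accepted by slider() and the shape of the
  // returned parameter object are defined by ParameterSystem, not asserted here):
  //
  //   const speed = viji.slider(1, { label: "Speed" }); // speed.value is assumed to hold the current value
  //   function render(viji) {
  //     const ctx = viji.useContext("2d");
  //     ctx.clearRect(0, 0, viji.width, viji.height);
  //   }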
  constructor() {
    this.parameterSystem = new ParameterSystem((type, data) => {
      this.postMessage(type, data);
    });
    this.interactionSystem = new InteractionSystem();
    this.videoSystem = new VideoSystem();
    Object.assign(this.viji, this.interactionSystem.getInteractionAPIs());
    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
    this.viji.audio = {
      ...this.audioState,
      getFrequencyData: () => this.audioState.frequencyData
    };
    this.setupMessageHandling();
  }
  /**
   * Initialize P5.js mode
   * Lazy-loads P5WorkerAdapter and sets up P5 rendering
   */
  async initP5Mode(setup, render) {
    try {
      this.rendererType = "p5";
      this.debugLog("🎨 Initializing P5.js mode...");
      const { P5WorkerAdapter } = await import("./P5WorkerAdapter-bO_02bv6.js");
      this.p5Adapter = new P5WorkerAdapter(
        this.canvas,
        this.viji,
        {
          setup,
          render
        }
      );
      await this.p5Adapter.init();
      this.debugLog("✅ P5.js mode initialized successfully");
    } catch (error) {
      console.error("❌ Failed to initialize P5.js mode:", error);
      this.postMessage("error", {
        message: `P5.js initialization failed: ${error.message}`,
        code: "P5_INIT_ERROR"
      });
      this.rendererType = "native";
      this.p5Adapter = null;
    }
  }
  // Reset parameter state (called when loading new scene)
  resetParameterState() {
    this.parameterSystem.resetParameterState();
    this.interactionSystem.resetInteractionState();
    this.audioState = {
      isConnected: false,
      volume: { rms: 0, peak: 0 },
      bands: {
        bass: 0,
        mid: 0,
        treble: 0,
        subBass: 0,
        lowMid: 0,
        highMid: 0,
        presence: 0,
        brilliance: 0
      },
      frequencyData: new Uint8Array(0)
    };
    this.viji.audio = {
      ...this.audioState,
      getFrequencyData: () => this.audioState.frequencyData
    };
    this.videoSystem.resetVideoState();
    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
  }
  // Send all parameters (from helper functions) to host
  sendAllParametersToHost() {
    this.parameterSystem.sendAllParametersToHost();
  }
  setupMessageHandling() {
    self.onmessage = (event) => {
      const message = event.data;
      switch (message.type) {
        case "init":
          this.handleInit(message);
          break;
        case "frame-rate-update":
          this.handleFrameRateUpdate(message);
          break;
        case "refresh-rate-update":
          this.handleRefreshRateUpdate(message);
          break;
        case "cv-frame-rate-update":
          this.handleCVFrameRateUpdate(message);
          break;
        case "resolution-update":
          this.handleResolutionUpdate(message);
          break;
        case "set-scene-code":
          this.handleSetSceneCode(message);
          break;
        case "debug-mode":
          this.setDebugMode(message.data.enabled);
          break;
        case "parameter-update":
          this.handleParameterUpdate(message);
          break;
        case "parameter-batch-update":
          this.handleParameterBatchUpdate(message);
          break;
        case "stream-update":
          this.handleStreamUpdate(message);
          break;
        case "audio-analysis-update":
          this.handleAudioAnalysisUpdate(message);
          break;
        case "video-canvas-setup":
          this.handleVideoCanvasSetup(message);
          break;
        case "video-frame-update":
          this.handleVideoFrameUpdate(message);
          break;
        case "video-config-update":
          this.handleVideoConfigUpdate(message);
          break;
        case "mouse-update":
          this.handleMouseUpdate(message);
          break;
        case "keyboard-update":
          this.handleKeyboardUpdate(message);
          break;
        case "touch-update":
          this.handleTouchUpdate(message);
          break;
        case "interaction-enabled":
          this.handleInteractionEnabled(message);
          break;
        case "performance-update":
          this.handlePerformanceUpdate(message);
          break;
        case "capture-frame":
          this.handleCaptureFrame(message);
          break;
      }
    };
  }
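  // NOTE (editorial sketch): every message dispatched above is a plain { type, data } object
  // posted by the host. Two small illustrative examples (hypothetical host-side `worker`):
  //
  //   worker.postMessage({ type: "debug-mode", data: { enabled: true } });
  //   worker.postMessage({ type: "frame-rate-update", data: { mode: "half" } });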
  handleInit(message) {
    try {
      this.canvas = message.data.canvas;
      this.viji.canvas = this.canvas;
      this.viji.width = this.canvas.width;
      this.viji.height = this.canvas.height;
      this.startRenderLoop();
      this.postMessage("init-response", {
        id: message.id
      });
      this.postMessage("ready", {
        rendererType: this.rendererType,
        canvasSize: { width: this.canvas.width, height: this.canvas.height }
      });
    } catch (error) {
      this.postMessage("error", {
        id: message.id,
        message: error.message,
        code: "INIT_ERROR"
      });
    }
  }
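  // NOTE (editorial sketch): init is the one message that must transfer the render canvas.
  // Based on the fields read above, the host side would look roughly like this (hypothetical
  // host-side code; `canvasEl` and `worker` are assumptions):
  //
  //   const offscreen = canvasEl.transferControlToOffscreen();
  //   worker.postMessage({ type: "init", id: "init_1", data: { canvas: offscreen } }, [offscreen]);
  //
  // The worker answers with "init-response" (echoing the id) followed by "ready".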
  handleFrameRateUpdate(message) {
    if (message.data && message.data.mode) {
      this.frameRateMode = message.data.mode;
      this.debugLog("Frame rate mode updated to:", message.data.mode);
    }
  }
  handleRefreshRateUpdate(message) {
    if (message.data && message.data.screenRefreshRate) {
      this.screenRefreshRate = message.data.screenRefreshRate;
      this.debugLog("Screen refresh rate updated to:", message.data.screenRefreshRate + "Hz");
    }
  }
  handleCVFrameRateUpdate(message) {
    if (message.data && message.data.mode) {
      const sceneProcessingFPS = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
      if (this.videoSystem) {
        this.videoSystem.handleVideoConfigUpdate({
          cvFrameRate: {
            mode: message.data.mode,
            sceneTargetFPS: sceneProcessingFPS
          },
          timestamp: performance.now()
        });
      }
      this.debugLog(`CV frame rate updated to: ${message.data.mode} of ${sceneProcessingFPS} FPS scene processing`);
    }
  }
  trackEffectiveFrameTime(currentTime) {
    this.effectiveFrameTimes.push(currentTime);
    if (this.effectiveFrameTimes.length > 60) {
      this.effectiveFrameTimes.shift();
    }
  }
  reportPerformanceStats(currentTime) {
    if (currentTime - this.lastEffectiveRateReport >= this.effectiveRateReportInterval) {
      if (this.effectiveFrameTimes.length >= 2) {
        const totalTime = this.effectiveFrameTimes[this.effectiveFrameTimes.length - 1] - this.effectiveFrameTimes[0];
        const frameCount = this.effectiveFrameTimes.length - 1;
        const effectiveRefreshRate = Math.round(frameCount / totalTime * 1e3);
        const cvStats = this.videoSystem.getCVStats();
        this.postMessage("performance-update", {
          effectiveRefreshRate,
          frameRateMode: this.frameRateMode,
          screenRefreshRate: this.screenRefreshRate,
          rendererType: this.rendererType,
          parameterCount: this.parameterSystem.getParameterCount(),
          // Include CV stats if available
          cv: cvStats ? {
            activeFeatures: cvStats.activeFeatures,
            processingTime: cvStats.processingTime,
            targetFPS: cvStats.effectiveFPS,
            actualFPS: cvStats.actualFPS,
            isProcessing: cvStats.isProcessing
          } : void 0
        });
      }
      this.lastEffectiveRateReport = currentTime;
    }
  }
  handleResolutionUpdate(message) {
    if (message.data) {
      if (this.canvas) {
        this.canvas.width = Math.round(message.data.effectiveWidth);
        this.canvas.height = Math.round(message.data.effectiveHeight);
      }
      this.viji.width = Math.round(message.data.effectiveWidth);
      this.viji.height = Math.round(message.data.effectiveHeight);
      if (this.gl) {
        this.gl.viewport(0, 0, this.viji.width, this.viji.height);
      }
      if (this.p5Adapter && this.rendererType === "p5") {
        this.p5Adapter.resize(this.viji.width, this.viji.height);
      }
      this.debugLog("Canvas resolution updated to:", this.viji.width + "x" + this.viji.height);
    }
  }
  handleParameterUpdate(message) {
    if (message.data && message.data.name !== void 0 && message.data.value !== void 0) {
      this.parameterSystem.updateParameterValue(message.data.name, message.data.value);
    }
  }
  handleParameterBatchUpdate(message) {
    if (message.data && message.data.updates) {
      for (const update of message.data.updates) {
        this.parameterSystem.updateParameterValue(update.name, update.value);
      }
      this.parameterSystem.markInitialValuesSynced();
      this.debugLog("Parameter system initialized successfully");
    }
  }
  handleStreamUpdate(message) {
    this.debugLog("Stream update:", message.data);
  }
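  // NOTE (editorial sketch): parameter messages carry name/value pairs, matching the guards above
  // (illustrative host-side calls):
  //
  //   worker.postMessage({ type: "parameter-update", data: { name: "Speed", value: 2 } });
  //   worker.postMessage({ type: "parameter-batch-update", data: { updates: [{ name: "Speed", value: 2 }] } });
  //
  // The batch form also marks initial values as synced, so it is presumably the one the host
  // uses right after a scene's parameters have been announced.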
  handleAudioAnalysisUpdate(message) {
    this.audioState = {
      isConnected: message.data.isConnected,
      volume: message.data.volume,
      bands: message.data.bands,
      frequencyData: new Uint8Array(message.data.frequencyData)
    };
    this.viji.audio = {
      ...this.audioState,
      getFrequencyData: () => this.audioState.frequencyData
    };
  }
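  // NOTE (editorial sketch): the audio analysis payload consumed above has roughly this shape
  // (band names taken from the audioState default; the numeric values are placeholders, and
  // frequencyData can be anything the Uint8Array constructor accepts):
  //
  //   {
  //     type: "audio-analysis-update",
  //     data: {
  //       isConnected: true,
  //       volume: { rms: 0.2, peak: 0.7 },
  //       bands: { bass: 0.5, mid: 0.3, treble: 0.1, subBass: 0.4, lowMid: 0.2, highMid: 0.2, presence: 0.1, brilliance: 0.05 },
  //       frequencyData: []
  //     }
  //   }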
  handleVideoCanvasSetup(message) {
    this.videoSystem.handleCanvasSetup({
      offscreenCanvas: message.data.offscreenCanvas,
      width: message.data.width,
      height: message.data.height,
      timestamp: message.data.timestamp
    });
    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
  }
  handleVideoFrameUpdate(message) {
    this.videoSystem.handleFrameUpdate({
      imageBitmap: message.data.imageBitmap,
      timestamp: message.data.timestamp
    });
    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
  }
  handleVideoConfigUpdate(message) {
    this.videoSystem.handleVideoConfigUpdate({
      ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
      ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
      ...message.data.width && { width: message.data.width },
      ...message.data.height && { height: message.data.height },
      ...message.data.disconnect && { disconnect: message.data.disconnect },
      timestamp: message.data.timestamp
    });
    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
  }
  handlePerformanceUpdate(message) {
    this.debugLog("Performance update:", message.data);
  }
  /**
   * Handle capture-frame request from host.
   * Produces an ArrayBuffer (image bytes) to send back as transferable.
   */
  async handleCaptureFrame(message) {
    try {
      if (!this.canvas) {
        throw new Error("Canvas not initialized");
      }
      const mimeType = message.data.type || "image/jpeg";
      const srcWidth = this.canvas.width;
      const srcHeight = this.canvas.height;
      let targetWidth = srcWidth;
      let targetHeight = srcHeight;
      if (typeof message.data.resolution === "number") {
        const scale = message.data.resolution > 0 ? message.data.resolution : 1;
        targetWidth = Math.max(1, Math.floor(srcWidth * scale));
        targetHeight = Math.max(1, Math.floor(srcHeight * scale));
      } else if (message.data.resolution && typeof message.data.resolution === "object") {
        targetWidth = Math.max(1, Math.floor(message.data.resolution.width));
        targetHeight = Math.max(1, Math.floor(message.data.resolution.height));
      }
      const srcAspect = srcWidth / srcHeight;
      const dstAspect = targetWidth / targetHeight;
      let sx = 0;
      let sy = 0;
      let sWidth = srcWidth;
      let sHeight = srcHeight;
      if (Math.abs(srcAspect - dstAspect) > 1e-6) {
        if (dstAspect > srcAspect) {
          sHeight = Math.floor(srcWidth / dstAspect);
          sy = Math.floor((srcHeight - sHeight) / 2);
        } else {
          sWidth = Math.floor(srcHeight * dstAspect);
          sx = Math.floor((srcWidth - sWidth) / 2);
        }
      }
      const temp = new OffscreenCanvas(targetWidth, targetHeight);
      const tctx = temp.getContext("2d");
      if (!tctx) throw new Error("Failed to get 2D context");
      tctx.drawImage(this.canvas, sx, sy, sWidth, sHeight, 0, 0, targetWidth, targetHeight);
      const blob = await temp.convertToBlob({ type: mimeType });
      const arrayBuffer = await blob.arrayBuffer();
      self.postMessage({
        type: "capture-frame-result",
        id: message.id,
        timestamp: Date.now(),
        data: {
          mimeType,
          buffer: arrayBuffer,
          width: targetWidth,
          height: targetHeight
        }
      }, [arrayBuffer]);
    } catch (error) {
      this.postMessage("error", {
        id: message.id,
        message: error.message,
        code: "CAPTURE_FRAME_ERROR"
      });
    }
  }
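  // NOTE (editorial sketch): a host capture request and its reply, matching the fields used above
  // (hypothetical host-side code):
  //
  //   worker.postMessage({ type: "capture-frame", id: "cap_1", data: { type: "image/png", resolution: { width: 1920, height: 1080 } } });
  //   // ...or resolution: 0.5 to scale the current canvas size.
  //   worker.onmessage = (e) => {
  //     if (e.data.type === "capture-frame-result") {
  //       const blob = new Blob([e.data.data.buffer], { type: e.data.data.mimeType });
  //     }
  //   };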
  handleSetSceneCode(message) {
    if (message.data && message.data.sceneCode) {
      self.setSceneCode(message.data.sceneCode);
    }
  }
  startRenderLoop() {
    this.isRunning = true;
    this.startTime = performance.now();
    this.lastTime = this.startTime;
    this.renderFrame();
  }
  renderFrame() {
    if (!this.isRunning) return;
    const currentTime = performance.now();
    this.interactionSystem.frameStart();
    this.viji.fps = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
    let shouldRender = true;
    if (this.frameRateMode === "half") {
      shouldRender = !this.skipNextFrame;
      this.skipNextFrame = !this.skipNextFrame;
    }
    if (shouldRender) {
      this.viji.deltaTime = (currentTime - this.lastTime) / 1e3;
      this.viji.time = (currentTime - this.startTime) / 1e3;
      this.viji.frameCount = ++this.frameCount;
      this.trackEffectiveFrameTime(currentTime);
      this.lastTime = currentTime;
      try {
        if (this.p5Adapter && this.rendererType === "p5") {
          const parameterObjects = this.parameterSystem.getAllParameterObjects();
          this.p5Adapter.tick(this.viji, parameterObjects);
        } else {
          const renderFunction2 = self.renderFunction;
          if (renderFunction2 && typeof renderFunction2 === "function") {
            renderFunction2(this.viji);
          }
        }
      } catch (error) {
        console.error("Render error:", error);
        this.postMessage("error", {
          message: error.message,
          code: "RENDER_ERROR",
          stack: error.stack
        });
      }
    }
    this.reportPerformanceStats(currentTime);
    requestAnimationFrame(() => this.renderFrame());
  }
  postMessage(type, data) {
    self.postMessage({
      type,
      id: data?.id || `${type}_${Date.now()}`,
      timestamp: Date.now(),
      data
    });
  }
  // Phase 7: Interaction Message Handlers (delegated to InteractionSystem)
  handleMouseUpdate(message) {
    this.interactionSystem.handleMouseUpdate(message.data);
  }
  handleKeyboardUpdate(message) {
    this.interactionSystem.handleKeyboardUpdate(message.data);
  }
  handleTouchUpdate(message) {
    this.interactionSystem.handleTouchUpdate(message.data);
  }
  handleInteractionEnabled(message) {
    this.interactionSystem.setInteractionEnabled(message.data.enabled);
  }
}
class SceneAnalyzer {
  /**
   * Detects the renderer type from scene code comments
   *
   * Looks for:
   * - // @renderer p5
   * - /* @renderer p5 *\/
   *
   * @param sceneCode - The artist's scene code to analyze
   * @returns The detected renderer type ('p5' or 'native')
   */
  static detectRendererType(sceneCode) {
    return /\/\/\s*@renderer\s+p5|\/\*\s*@renderer\s+p5\s*\*\//.test(sceneCode) ? "p5" : "native";
  }
}
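// NOTE (editorial sketch): SceneAnalyzer keys renderer selection off a comment in the artist's
// scene code. Two minimal scene strings that setSceneCode() below would accept (illustrative):
//
//   // native renderer: the code only has to define render(viji)
//   function render(viji) {
//     const ctx = viji.useContext("2d");
//     ctx.clearRect(0, 0, viji.width, viji.height);
//   }
//
//   // @renderer p5
//   // p5 renderer: render(viji, p5) is required; an optional setup function may also be defined
//   function render(viji, p5) {
//     p5.background(0);
//   }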
const runtime = new VijiWorkerRuntime();
let renderFunction = null;
async function setSceneCode(sceneCode) {
  try {
    runtime.resetParameterState();
    const rendererType = SceneAnalyzer.detectRendererType(sceneCode);
    if (rendererType === "p5") {
      const functionBody = sceneCode + '\nreturn { setup: typeof setup !== "undefined" ? setup : null, render: typeof render !== "undefined" ? render : null };';
      const sceneFunction = new Function("viji", "p5", functionBody);
      const { setup, render } = sceneFunction(runtime.viji, null);
      if (!render) {
        throw new Error("P5 mode requires a render(viji, p5) function");
      }
      await runtime.initP5Mode(setup, render);
      runtime.sendAllParametersToHost();
    } else {
      const functionBody = sceneCode + '\nif (typeof render === "function") {\n  return render;\n}\nthrow new Error("Scene code must define a render function");';
      const sceneFunction = new Function("viji", functionBody);
      renderFunction = sceneFunction(runtime.viji);
      self.renderFunction = renderFunction;
      runtime.sendAllParametersToHost();
    }
  } catch (error) {
    console.error("Failed to load scene code:", error);
    self.postMessage({
      type: "error",
      id: `scene_error_${Date.now()}`,
      timestamp: Date.now(),
      data: {
        message: `Scene code error: ${error.message}`,
        code: "SCENE_CODE_ERROR"
      }
    });
  }
}
self.setSceneCode = setSceneCode;
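// NOTE (editorial sketch): besides calling self.setSceneCode directly, scene loading is normally
// driven over the message channel handled by handleSetSceneCode() (hypothetical host-side code):
//
//   worker.postMessage({ type: "set-scene-code", data: { sceneCode: "function render(viji) { /* ... */ }" } });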
//# sourceMappingURL=viji.worker-BjMgRS7D.js.map