@viji-dev/core 0.3.20 → 0.3.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4325 +0,0 @@
1
class ParameterSystem {
  // --- Parameter system for Phase 2 (new object-based approach) ---
  parameterDefinitions = /* @__PURE__ */ new Map();
  parameterGroups = /* @__PURE__ */ new Map();
  parameterValues = /* @__PURE__ */ new Map();
  // Maps parameter names (labels) to their live parameter objects
  parameterObjects = /* @__PURE__ */ new Map();
  parametersDefined = false;
  // Track if initial values have been synced from host
  initialValuesSynced = false;
  // Debug logging control
  debugMode = false;
  // Message posting callback: (type, payload) => void, supplied by the host bridge
  postMessageCallback;

  /**
   * @param {(type: string, data: object) => void} postMessageCallback - callback
   *   used to post parameter definitions and errors to the host.
   */
  constructor(postMessageCallback) {
    this.postMessageCallback = postMessageCallback;
  }

  /**
   * Enable or disable debug logging.
   */
  setDebugMode(enabled) {
    this.debugMode = enabled;
  }

  /**
   * Debug logging helper — no-op unless debug mode is enabled.
   */
  debugLog(message, ...args) {
    if (this.debugMode) {
      console.log(message, ...args);
    }
  }

  /**
   * Shared factory behind every create*Parameter helper.
   *
   * Builds the public parameter object ({ value, ...extraFields, label,
   * description, group, category }), builds the matching definition record,
   * registers both, and returns the parameter object. Field order mirrors
   * the original per-type implementations so serialized shapes are unchanged.
   *
   * @param {string} type          - parameter type tag ("slider", "color", ...)
   * @param {*} defaultValue       - initial value
   * @param {object} config        - user-supplied config (keyed by label)
   * @param {object} [extraFields] - type-specific fields merged after `value`
   * @param {(obj: object) => object} [buildConfig] - optional builder for the
   *   definition's `config` sub-object (omitted for types without one)
   * @returns {object} the live parameter object stored in parameterObjects
   */
  #registerParameter(type, defaultValue, config, extraFields = {}, buildConfig = null) {
    const paramName = config.label; // parameters are keyed by their label
    const paramObject = {
      value: defaultValue,
      ...extraFields,
      label: config.label,
      description: config.description ?? "",
      group: config.group ?? "general",
      category: config.category ?? "general"
    };
    const definition = {
      type,
      defaultValue,
      label: paramObject.label,
      description: paramObject.description,
      group: paramObject.group,
      category: paramObject.category
    };
    if (buildConfig) {
      definition.config = buildConfig(paramObject);
    }
    this.storeParameterDefinition(paramName, definition);
    this.parameterObjects.set(paramName, paramObject);
    return paramObject;
  }

  /** Numeric slider parameter with min/max/step bounds. */
  createSliderParameter(defaultValue, config) {
    return this.#registerParameter(
      "slider", defaultValue, config,
      { min: config.min ?? 0, max: config.max ?? 100, step: config.step ?? 1 },
      (o) => ({ min: o.min, max: o.max, step: o.step })
    );
  }

  /** Hex color parameter ("#rrggbb"). */
  createColorParameter(defaultValue, config) {
    return this.#registerParameter("color", defaultValue, config);
  }

  /** Boolean toggle parameter. */
  createToggleParameter(defaultValue, config) {
    return this.#registerParameter("toggle", defaultValue, config);
  }

  /** Enumerated select parameter; value must be one of config.options. */
  createSelectParameter(defaultValue, config) {
    return this.#registerParameter(
      "select", defaultValue, config,
      { options: config.options },
      (o) => ({ options: o.options })
    );
  }

  /** Free-text parameter with an optional maxLength (default 1000). */
  createTextParameter(defaultValue, config) {
    return this.#registerParameter(
      "text", defaultValue, config,
      { maxLength: config.maxLength ?? 1e3 },
      (o) => ({ maxLength: o.maxLength })
    );
  }

  /** Plain numeric input parameter with min/max/step bounds. */
  createNumberParameter(defaultValue, config) {
    return this.#registerParameter(
      "number", defaultValue, config,
      { min: config.min ?? 0, max: config.max ?? 100, step: config.step ?? 1 },
      (o) => ({ min: o.min, max: o.max, step: o.step })
    );
  }

  /** Image parameter; value is null, ImageBitmap, or OffscreenCanvas. */
  createImageParameter(defaultValue, config) {
    return this.#registerParameter("image", defaultValue, config);
  }

  /** Store a definition and seed the current value with its default. */
  storeParameterDefinition(name, definition) {
    this.parameterDefinitions.set(name, definition);
    this.parameterValues.set(name, definition.defaultValue);
  }

  /**
   * Apply a new value to a known parameter.
   *
   * @returns {boolean} true when the stored value was updated (always true on
   *   the first host sync, even for unchanged values); false for unknown
   *   parameters, validation failures, or no-op updates after the initial sync.
   */
  updateParameterValue(name, value) {
    const definition = this.parameterDefinitions.get(name);
    if (!definition) {
      console.warn(`Unknown parameter: ${name}. Available parameters:`, Array.from(this.parameterDefinitions.keys()));
      return false;
    }
    if (!this.validateParameterValue(name, value, definition)) {
      console.warn(`Validation failed for parameter ${name} = ${value}`);
      return false;
    }
    const currentValue = this.parameterValues.get(name);
    const isInitialSync = !this.initialValuesSynced;
    if (currentValue === value && !isInitialSync) {
      return false; // unchanged — skip downstream notifications
    }
    this.parameterValues.set(name, value);
    // Keep the live parameter object (seen by scene code) in sync.
    const parameterObject = this.parameterObjects.get(name);
    if (parameterObject) {
      parameterObject.value = value;
    }
    return true;
  }

  /**
   * Validate a candidate value against a parameter definition.
   * Runs the optional custom `definition.validate` hook first, then
   * type-specific checks. Logs the reason and returns false on failure.
   */
  validateParameterValue(name, value, definition) {
    if (definition.validate && !definition.validate(value)) {
      console.error(`Custom validation failed for parameter '${name}': ${value}`);
      return false;
    }
    switch (definition.type) {
      case "slider":
      case "number":
        if (typeof value !== "number" || isNaN(value)) {
          console.error(`Parameter '${name}' must be a number, got: ${value}`);
          return false;
        }
        if (definition.config?.min !== void 0 && value < definition.config.min) {
          console.error(`Parameter '${name}' value ${value} is below minimum ${definition.config.min}`);
          return false;
        }
        if (definition.config?.max !== void 0 && value > definition.config.max) {
          console.error(`Parameter '${name}' value ${value} is above maximum ${definition.config.max}`);
          return false;
        }
        break;
      case "color":
        if (typeof value !== "string" || !/^#[0-9A-Fa-f]{6}$/.test(value)) {
          console.error(`Parameter '${name}' must be a valid hex color, got: ${value}`);
          return false;
        }
        break;
      case "toggle":
        if (typeof value !== "boolean") {
          console.error(`Parameter '${name}' must be a boolean, got: ${value}`);
          return false;
        }
        break;
      case "select":
        if (!definition.config?.options || !definition.config.options.includes(value)) {
          console.error(`Parameter '${name}' value ${value} is not in options: ${definition.config?.options}`);
          return false;
        }
        break;
      case "text":
        if (typeof value !== "string") {
          console.error(`Parameter '${name}' must be a string, got: ${value}`);
          return false;
        }
        if (definition.config?.maxLength && value.length > definition.config.maxLength) {
          console.error(`Parameter '${name}' text too long: ${value.length} > ${definition.config.maxLength}`);
          return false;
        }
        break;
      case "image":
        // NOTE: ImageBitmap/OffscreenCanvas are browser/worker globals; this
        // branch only runs in environments that provide them.
        if (value !== null && !(value instanceof ImageBitmap) && !(value instanceof OffscreenCanvas)) {
          console.error(`Parameter '${name}' must be null, ImageBitmap, or OffscreenCanvas, got: ${value}`);
          return false;
        }
        break;
    }
    return true;
  }

  // Reset parameter state (called when loading a new scene)
  resetParameterState() {
    this.parametersDefined = false;
    this.initialValuesSynced = false;
    this.parameterDefinitions.clear();
    this.parameterGroups.clear();
    this.parameterValues.clear();
    this.parameterObjects.clear();
  }

  /**
   * Send all registered parameter definitions to the host, grouped by their
   * `group` name (a group's category comes from its first parameter).
   * Sends at most once per scene — guarded by `parametersDefined`.
   */
  sendAllParametersToHost() {
    if (this.parametersDefined || this.parameterDefinitions.size === 0) {
      return;
    }
    try {
      const groups = /* @__PURE__ */ new Map();
      for (const [paramName, paramDef] of this.parameterDefinitions) {
        const groupName = paramDef.group || "general";
        if (!groups.has(groupName)) {
          groups.set(groupName, {
            groupName,
            category: paramDef.category || "general",
            parameters: {}
          });
        }
        groups.get(groupName).parameters[paramName] = paramDef;
      }
      this.parametersDefined = true;
      this.postMessageCallback("parameters-defined", {
        groups: Array.from(groups.values())
      });
      this.debugLog(`All parameters sent to host: ${this.parameterDefinitions.size} parameters in ${groups.size} groups`);
    } catch (error) {
      this.postMessageCallback("parameter-validation-error", {
        message: `Failed to send parameters to host: ${error.message}`,
        code: "PARAMETER_SENDING_ERROR"
      });
    }
  }

  // Mark initial values as synced (equal-value updates become no-ops afterwards)
  markInitialValuesSynced() {
    this.initialValuesSynced = true;
  }

  // Get parameter count for performance reporting
  getParameterCount() {
    return this.parameterDefinitions.size;
  }

  // Get all parameter objects (for P5 adapter to add .p5 properties)
  getAllParameterObjects() {
    return this.parameterObjects;
  }
}
342
class InteractionSystem {
  // Interaction enabled state — when false, host update messages are ignored.
  isEnabled = true;

  // Mouse interaction state. The object (and its nested `velocity`) is exposed
  // via getInteractionAPIs(), so it is always mutated in place, never replaced.
  mouseState = {
    x: 0,
    y: 0,
    isInCanvas: false,
    isPressed: false,
    leftButton: false,
    rightButton: false,
    middleButton: false,
    velocity: { x: 0, y: 0 },
    deltaX: 0,
    deltaY: 0,
    wheelDelta: 0,
    wheelX: 0,
    wheelY: 0,
    wasPressed: false,
    wasReleased: false,
    wasMoved: false
  };

  // Keyboard interaction state. Keys are tracked lowercase; the query helpers
  // lowercase their argument so lookups are case-insensitive.
  keyboardState = {
    isPressed: (key) => this.keyboardState.activeKeys.has(key.toLowerCase()),
    wasPressed: (key) => this.keyboardState.pressedThisFrame.has(key.toLowerCase()),
    wasReleased: (key) => this.keyboardState.releasedThisFrame.has(key.toLowerCase()),
    activeKeys: /* @__PURE__ */ new Set(),
    pressedThisFrame: /* @__PURE__ */ new Set(),
    releasedThisFrame: /* @__PURE__ */ new Set(),
    lastKeyPressed: "",
    lastKeyReleased: "",
    shift: false,
    ctrl: false,
    alt: false,
    meta: false
  };

  // Touch interaction state. `gestures` is a stable object mutated in place.
  touchState = {
    points: [],
    count: 0,
    started: [],
    moved: [],
    ended: [],
    primary: null,
    gestures: {
      isPinching: false,
      isRotating: false,
      isPanning: false,
      isTapping: false,
      pinchScale: 1,
      pinchDelta: 0,
      rotationAngle: 0,
      rotationDelta: 0,
      panDelta: { x: 0, y: 0 },
      tapCount: 0,
      lastTapTime: 0,
      tapPosition: null
    }
  };

  constructor() {
    // Bind the message handlers so they can be passed directly as callbacks.
    this.handleMouseUpdate = this.handleMouseUpdate.bind(this);
    this.handleKeyboardUpdate = this.handleKeyboardUpdate.bind(this);
    this.handleTouchUpdate = this.handleTouchUpdate.bind(this);
    this.frameStart = this.frameStart.bind(this);
  }

  /**
   * Get the interaction APIs for inclusion in the viji object.
   */
  getInteractionAPIs() {
    return {
      mouse: this.mouseState,
      keyboard: this.keyboardState,
      touches: this.touchState
    };
  }

  /**
   * Called at the start of each frame to reset frame-scoped (edge-triggered)
   * events and per-frame wheel accumulators.
   */
  frameStart() {
    this.mouseState.wasPressed = false;
    this.mouseState.wasReleased = false;
    this.mouseState.wasMoved = false;
    this.mouseState.wheelDelta = 0;
    this.mouseState.wheelX = 0;
    this.mouseState.wheelY = 0;
    this.keyboardState.pressedThisFrame.clear();
    this.keyboardState.releasedThisFrame.clear();
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    this.touchState.gestures.isTapping = false;
    this.touchState.gestures.pinchDelta = 0;
    this.touchState.gestures.rotationDelta = 0;
  }

  /**
   * Handle mouse update messages from the host.
   * Optional fields (deltas, wheel deltas, edge flags) may be absent.
   */
  handleMouseUpdate(data) {
    if (!this.isEnabled) return;
    // Coerce optional deltas once so a missing field never counts as movement.
    // BUGFIX: the previous `data.deltaX !== 0` check was true for undefined,
    // so any message without deltas incorrectly flagged the mouse as moved.
    const deltaX = data.deltaX || 0;
    const deltaY = data.deltaY || 0;
    this.mouseState.x = data.x;
    this.mouseState.y = data.y;
    this.mouseState.isInCanvas = data.isInCanvas !== void 0 ? data.isInCanvas : true;
    // `buttons` is a bitmask: 1 = left, 2 = right, 4 = middle.
    this.mouseState.leftButton = (data.buttons & 1) !== 0;
    this.mouseState.rightButton = (data.buttons & 2) !== 0;
    this.mouseState.middleButton = (data.buttons & 4) !== 0;
    this.mouseState.isPressed = data.buttons > 0;
    this.mouseState.deltaX = deltaX;
    this.mouseState.deltaY = deltaY;
    // Wheel deltas accumulate across updates within a frame (frameStart clears).
    this.mouseState.wheelDelta += data.wheelDeltaY || 0;
    this.mouseState.wheelX += data.wheelDeltaX || 0;
    this.mouseState.wheelY += data.wheelDeltaY || 0;
    this.mouseState.velocity.x = deltaX;
    this.mouseState.velocity.y = deltaY;
    // Edge flags are sticky until frameStart so multiple updates per frame
    // cannot erase an observed press/release/move.
    this.mouseState.wasPressed = this.mouseState.wasPressed || (data.wasPressed || false);
    this.mouseState.wasReleased = this.mouseState.wasReleased || (data.wasReleased || false);
    this.mouseState.wasMoved = this.mouseState.wasMoved || deltaX !== 0 || deltaY !== 0;
  }

  /**
   * Handle keyboard update messages from the host.
   * Ignores auto-repeat keydowns (key already in activeKeys).
   */
  handleKeyboardUpdate(data) {
    if (!this.isEnabled) return;
    const key = data.key.toLowerCase();
    if (data.type === "keydown") {
      if (!this.keyboardState.activeKeys.has(key)) {
        this.keyboardState.activeKeys.add(key);
        this.keyboardState.pressedThisFrame.add(key);
        this.keyboardState.lastKeyPressed = data.key;
      }
    } else if (data.type === "keyup") {
      this.keyboardState.activeKeys.delete(key);
      this.keyboardState.releasedThisFrame.add(key);
      this.keyboardState.lastKeyReleased = data.key;
    }
    this.keyboardState.shift = data.shiftKey;
    this.keyboardState.ctrl = data.ctrlKey;
    this.keyboardState.alt = data.altKey;
    this.keyboardState.meta = data.metaKey;
  }

  /**
   * Handle touch update messages from the host.
   * Rebuilds the touch point list from the raw host touches and resets
   * gesture state (gesture recognition itself is not implemented here).
   */
  handleTouchUpdate(data) {
    if (!this.isEnabled) return;
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    const touches = data.touches.map((touch) => ({
      id: touch.identifier,
      x: touch.clientX,
      y: touch.clientY,
      pressure: touch.pressure || 0,
      radius: Math.max(touch.radiusX || 0, touch.radiusY || 0),
      radiusX: touch.radiusX || 0,
      radiusY: touch.radiusY || 0,
      rotationAngle: touch.rotationAngle || 0,
      force: touch.force || touch.pressure || 0,
      deltaX: 0,
      // Could be calculated if we track previous positions
      deltaY: 0,
      velocity: { x: 0, y: 0 },
      // Could be calculated if we track movement
      isNew: data.type === "touchstart",
      isActive: true,
      isEnding: data.type === "touchend" || data.type === "touchcancel"
    }));
    this.touchState.points = touches;
    this.touchState.count = touches.length;
    this.touchState.primary = touches[0] || null;
    if (data.type === "touchstart") {
      this.touchState.started = touches;
    } else if (data.type === "touchmove") {
      this.touchState.moved = touches;
    } else if (data.type === "touchend" || data.type === "touchcancel") {
      this.touchState.ended = touches;
    }
    // BUGFIX: mutate the existing gestures object instead of replacing it,
    // so consumers holding a reference from getInteractionAPIs() stay live.
    this.#resetGestures();
  }

  /**
   * Reset all interaction state (called when loading a new scene).
   * Delegates to resetInteractionStates — the two public entry points were
   * duplicated implementations; the in-place version is kept because it
   * preserves the identity of the exposed state objects.
   */
  resetInteractionState() {
    this.resetInteractionStates();
  }

  /**
   * Enable or disable interaction processing.
   * Disabling also clears all current state so stale input cannot linger.
   */
  setInteractionEnabled(enabled) {
    this.isEnabled = enabled;
    if (!enabled) {
      this.resetInteractionStates();
    }
  }

  /**
   * Get current interaction enabled state.
   */
  getInteractionEnabled() {
    return this.isEnabled;
  }

  /** Reset the gesture sub-state in place (identity-preserving). */
  #resetGestures() {
    const gestures = this.touchState.gestures;
    gestures.isPinching = false;
    gestures.isRotating = false;
    gestures.isPanning = false;
    gestures.isTapping = false;
    gestures.pinchScale = 1;
    gestures.pinchDelta = 0;
    gestures.rotationAngle = 0;
    gestures.rotationDelta = 0;
    gestures.panDelta = { x: 0, y: 0 };
    gestures.tapCount = 0;
    gestures.lastTapTime = 0;
    gestures.tapPosition = null;
  }

  /**
   * Reset all interaction states to default values, mutating the existing
   * state objects in place so references handed out earlier remain valid.
   */
  resetInteractionStates() {
    this.mouseState.x = 0;
    this.mouseState.y = 0;
    this.mouseState.isInCanvas = false;
    this.mouseState.isPressed = false;
    this.mouseState.leftButton = false;
    this.mouseState.rightButton = false;
    this.mouseState.middleButton = false;
    this.mouseState.velocity.x = 0;
    this.mouseState.velocity.y = 0;
    this.mouseState.deltaX = 0;
    this.mouseState.deltaY = 0;
    this.mouseState.wheelDelta = 0;
    this.mouseState.wheelX = 0;
    this.mouseState.wheelY = 0;
    this.mouseState.wasPressed = false;
    this.mouseState.wasReleased = false;
    this.mouseState.wasMoved = false;
    this.keyboardState.activeKeys.clear();
    this.keyboardState.pressedThisFrame.clear();
    this.keyboardState.releasedThisFrame.clear();
    this.keyboardState.lastKeyPressed = "";
    this.keyboardState.lastKeyReleased = "";
    this.keyboardState.shift = false;
    this.keyboardState.ctrl = false;
    this.keyboardState.alt = false;
    this.keyboardState.meta = false;
    this.touchState.points = [];
    this.touchState.count = 0;
    this.touchState.started = [];
    this.touchState.moved = [];
    this.touchState.ended = [];
    this.touchState.primary = null;
    this.#resetGestures();
  }
}
651
- class CVSystem {
652
- // MediaPipe Tasks Vision worker
653
- cvWorker = null;
654
- workerRestartCount = 0;
655
- maxWorkerRestarts = 3;
656
- workerLastRestart = 0;
657
- workerRestartCooldown = 5e3;
658
- // 5 seconds
659
- // Feature activation state
660
- activeFeatures = /* @__PURE__ */ new Set();
661
- pendingFeatures = /* @__PURE__ */ new Set();
662
- // Features to restore after restart
663
- // CV Results cache (for non-blocking processing)
664
- results = {
665
- faces: [],
666
- hands: [],
667
- pose: null,
668
- segmentation: null
669
- };
670
- // Processing state and performance tracking
671
- processing = false;
672
- cvFrameCounter = 0;
673
- cvFrameRateMode = "quarter";
674
- // Default: 1/4 scene rate
675
- sceneTargetFPS = 60;
676
- // Will be updated from scene processing rate
677
- processingStartTime = 0;
678
- processingTimes = [];
679
- // CV Frame Rate Tracking (similar to main core)
680
- cvFrameTimes = [];
681
- lastCVFrameTime = 0;
682
- actualCVFPS = 0;
683
- debugMode = false;
684
- // Debug mode disabled for production
685
- constructor() {
686
- this.debugLog("🔧 CVSystem initialized for MediaPipe Tasks Vision");
687
- }
688
- debugLog(...args) {
689
- if (this.debugMode) {
690
- console.log("🔧 [CVSystem]", ...args);
691
- }
692
- }
693
- setDebugMode(enabled) {
694
- this.debugMode = enabled;
695
- this.debugLog(`Debug mode ${enabled ? "enabled" : "disabled"}`);
696
- }
697
- /**
698
- * Update CV frame rate configuration (called from worker)
699
- */
700
- updateCVFrameRate(mode, sceneTargetFPS) {
701
- this.cvFrameRateMode = mode;
702
- this.sceneTargetFPS = sceneTargetFPS;
703
- this.debugLog(`CV frame rate updated: mode=${mode}, sceneTargetFPS=${sceneTargetFPS}`);
704
- }
705
- /**
706
- * Initialize MediaPipe Tasks Vision worker
707
- */
708
- async ensureCVWorker() {
709
- if (this.cvWorker) return;
710
- try {
711
- this.debugLog("🔧 Creating MediaPipe Tasks Vision worker...");
712
- const workerUrl = "/dist/assets/cv-tasks.worker.js";
713
- this.cvWorker = new Worker(workerUrl);
714
- this.cvWorker.addEventListener("message", (evt) => {
715
- const msg = evt.data;
716
- this.debugLog(`📨 [CV Worker -> CVSystem] ${msg.type}`, msg.success ? msg.data : msg.error);
717
- });
718
- this.cvWorker.onerror = (err) => {
719
- this.debugLog("❌ CV worker error event", err.message);
720
- this.handleWorkerFailure(`Worker error: ${err.message}`);
721
- };
722
- this.cvWorker.onmessageerror = (err) => {
723
- this.debugLog("❌ CV worker message error", err);
724
- this.handleWorkerFailure("Worker message error");
725
- };
726
- await this.postToCV("init", {});
727
- this.debugLog("✅ CV worker initialized");
728
- } catch (error) {
729
- this.debugLog("❌ Failed to initialize CV worker:", error);
730
- throw error;
731
- }
732
- }
733
- /**
734
- * Handle worker failure and attempt restart
735
- */
736
- async handleWorkerFailure(reason) {
737
- this.debugLog(`⚠️ CV Worker failure: ${reason}`);
738
- if (this.cvWorker) {
739
- this.cvWorker.terminate();
740
- this.cvWorker = null;
741
- }
742
- const now = Date.now();
743
- if (this.workerRestartCount >= this.maxWorkerRestarts) {
744
- this.debugLog("❌ Max worker restarts exceeded, giving up");
745
- return;
746
- }
747
- if (now - this.workerLastRestart < this.workerRestartCooldown) {
748
- this.debugLog("⏱️ Worker restart cooldown active, skipping restart");
749
- return;
750
- }
751
- this.pendingFeatures = new Set(this.activeFeatures);
752
- this.activeFeatures.clear();
753
- try {
754
- this.workerRestartCount++;
755
- this.workerLastRestart = now;
756
- this.debugLog(`🔄 Restarting CV worker (attempt ${this.workerRestartCount}/${this.maxWorkerRestarts})`);
757
- await this.ensureCVWorker();
758
- if (this.pendingFeatures.size > 0) {
759
- const featuresToRestore = Array.from(this.pendingFeatures);
760
- this.debugLog(`🔄 Restoring features: [${featuresToRestore.join(", ")}]`);
761
- try {
762
- await this.postToCV("config", { features: featuresToRestore });
763
- this.activeFeatures = new Set(this.pendingFeatures);
764
- this.debugLog("✅ Features restored successfully");
765
- } catch (error) {
766
- this.debugLog("❌ Failed to restore features:", error);
767
- }
768
- this.pendingFeatures.clear();
769
- }
770
- this.debugLog("✅ CV worker restarted successfully");
771
- } catch (error) {
772
- this.debugLog("❌ Failed to restart CV worker:", error);
773
- }
774
- }
775
- /**
776
- * Send message to CV worker and wait for response
777
- */
778
- postToCV(type, data, transfer) {
779
- return new Promise((resolve, reject) => {
780
- if (!this.cvWorker) {
781
- return reject(new Error("CV worker not initialized"));
782
- }
783
- const timeout = setTimeout(() => {
784
- this.debugLog(`⏱️ [CV Worker] timeout for ${type}`);
785
- if (type === "config") {
786
- this.handleWorkerFailure(`Timeout for ${type} message`);
787
- }
788
- reject(new Error(`CV worker timeout for ${type}`));
789
- }, 5e3);
790
- const onMessage = (ev) => {
791
- const msg = ev.data;
792
- if (msg.type === "result") {
793
- clearTimeout(timeout);
794
- this.cvWorker.removeEventListener("message", onMessage);
795
- if (msg.success) {
796
- this.debugLog(`✅ [CV Worker] response for ${type}`, msg.data);
797
- resolve(msg.data);
798
- } else {
799
- this.debugLog(`❌ [CV Worker] error response for ${type}`, msg.error);
800
- if (msg.restartRequired) {
801
- this.handleWorkerFailure(`Worker reported restart required: ${msg.error}`);
802
- }
803
- reject(new Error(msg.error || "CV worker error"));
804
- }
805
- }
806
- };
807
- this.cvWorker.addEventListener("message", onMessage);
808
- const message = {
809
- type,
810
- ...data
811
- };
812
- this.debugLog(`📤 [CVSystem -> CV Worker] ${type}`, data);
813
- this.cvWorker.postMessage(message, transfer || []);
814
- });
815
- }
816
- /**
817
- * Enable face detection feature (bounding boxes only)
818
- */
819
- async enableFaceDetection() {
820
- if (this.activeFeatures.has("faceDetection")) return;
821
- try {
822
- this.debugLog("🔧 Enabling face detection...");
823
- if (this.activeFeatures.size >= 3 && !this.checkWebGLContextAvailability()) {
824
- this.debugLog("⚠️ Warning: WebGL contexts may be running low. Consider disabling unused CV features.");
825
- }
826
- await this.ensureCVWorker();
827
- const newFeatures = Array.from(this.activeFeatures).concat(["faceDetection"]);
828
- await this.postToCV("config", { features: newFeatures });
829
- this.activeFeatures.add("faceDetection");
830
- this.debugLog("✅ Face detection enabled");
831
- } catch (error) {
832
- this.debugLog("❌ Failed to enable face detection:", error);
833
- throw error;
834
- }
835
- }
836
- /**
837
- * Disable face detection and cleanup
838
- */
839
- async disableFaceDetection() {
840
- if (!this.activeFeatures.has("faceDetection")) return;
841
- try {
842
- this.debugLog("🔧 Disabling face detection...");
843
- this.activeFeatures.delete("faceDetection");
844
- if (!this.activeFeatures.has("faceMesh")) {
845
- this.results.faces = [];
846
- }
847
- const newFeatures = Array.from(this.activeFeatures);
848
- await this.postToCV("config", { features: newFeatures });
849
- this.debugLog("✅ Face detection disabled and cleaned up");
850
- } catch (error) {
851
- this.debugLog("❌ Failed to disable face detection:", error);
852
- throw error;
853
- }
854
- }
855
- /**
856
- * Enable face mesh feature (468-point facial landmarks)
857
- */
858
- async enableFaceMesh() {
859
- if (this.activeFeatures.has("faceMesh")) return;
860
- try {
861
- this.debugLog("🔧 Enabling face mesh...");
862
- if (this.activeFeatures.size >= 3 && !this.checkWebGLContextAvailability()) {
863
- this.debugLog("⚠️ Warning: WebGL contexts may be running low. Consider disabling unused CV features.");
864
- }
865
- await this.ensureCVWorker();
866
- const newFeatures = Array.from(this.activeFeatures).concat(["faceMesh"]);
867
- await this.postToCV("config", { features: newFeatures });
868
- this.activeFeatures.add("faceMesh");
869
- this.debugLog("✅ Face mesh enabled");
870
- } catch (error) {
871
- this.debugLog("❌ Failed to enable face mesh:", error);
872
- throw error;
873
- }
874
- }
875
- /**
876
- * Disable face mesh and cleanup
877
- */
878
- async disableFaceMesh() {
879
- if (!this.activeFeatures.has("faceMesh")) return;
880
- try {
881
- this.debugLog("🔧 Disabling face mesh...");
882
- this.activeFeatures.delete("faceMesh");
883
- if (!this.activeFeatures.has("faceDetection")) {
884
- this.results.faces = [];
885
- }
886
- const newFeatures = Array.from(this.activeFeatures);
887
- await this.postToCV("config", { features: newFeatures });
888
- this.debugLog("✅ Face mesh disabled and cleaned up");
889
- } catch (error) {
890
- this.debugLog("❌ Failed to disable face mesh:", error);
891
- throw error;
892
- }
893
- }
894
- /**
895
- * Enable hand tracking feature
896
- */
897
- async enableHandTracking() {
898
- if (this.activeFeatures.has("handTracking")) return;
899
- try {
900
- this.debugLog("🔧 Enabling hand tracking...");
901
- await this.ensureCVWorker();
902
- const newFeatures = Array.from(this.activeFeatures).concat(["handTracking"]);
903
- await this.postToCV("config", { features: newFeatures });
904
- this.activeFeatures.add("handTracking");
905
- this.debugLog("✅ Hand tracking enabled");
906
- } catch (error) {
907
- this.debugLog("❌ Failed to enable hand tracking:", error);
908
- throw error;
909
- }
910
- }
911
- /**
912
- * Disable hand tracking and cleanup
913
- */
914
- async disableHandTracking() {
915
- if (!this.activeFeatures.has("handTracking")) return;
916
- try {
917
- this.debugLog("🔧 Disabling hand tracking...");
918
- this.activeFeatures.delete("handTracking");
919
- this.results.hands = [];
920
- const newFeatures = Array.from(this.activeFeatures);
921
- await this.postToCV("config", { features: newFeatures });
922
- this.debugLog("✅ Hand tracking disabled and cleaned up");
923
- } catch (error) {
924
- this.debugLog("❌ Failed to disable hand tracking:", error);
925
- throw error;
926
- }
927
- }
928
- /**
929
- * Enable pose detection feature
930
- */
931
- async enablePoseDetection() {
932
- if (this.activeFeatures.has("poseDetection")) return;
933
- try {
934
- this.debugLog("🔧 Enabling pose detection...");
935
- await this.ensureCVWorker();
936
- const newFeatures = Array.from(this.activeFeatures).concat(["poseDetection"]);
937
- await this.postToCV("config", { features: newFeatures });
938
- this.activeFeatures.add("poseDetection");
939
- this.debugLog("✅ Pose detection enabled");
940
- } catch (error) {
941
- this.debugLog("❌ Failed to enable pose detection:", error);
942
- throw error;
943
- }
944
- }
945
- /**
946
- * Disable pose detection and cleanup
947
- */
948
- async disablePoseDetection() {
949
- if (!this.activeFeatures.has("poseDetection")) return;
950
- try {
951
- this.debugLog("🔧 Disabling pose detection...");
952
- this.activeFeatures.delete("poseDetection");
953
- this.results.pose = null;
954
- const newFeatures = Array.from(this.activeFeatures);
955
- await this.postToCV("config", { features: newFeatures });
956
- this.debugLog("✅ Pose detection disabled and cleaned up");
957
- } catch (error) {
958
- this.debugLog("❌ Failed to disable pose detection:", error);
959
- throw error;
960
- }
961
- }
962
- /**
963
- * Enable body segmentation feature
964
- */
965
- async enableBodySegmentation() {
966
- if (this.activeFeatures.has("bodySegmentation")) return;
967
- try {
968
- this.debugLog("🔧 Enabling body segmentation...");
969
- await this.ensureCVWorker();
970
- const newFeatures = Array.from(this.activeFeatures).concat(["bodySegmentation"]);
971
- await this.postToCV("config", { features: newFeatures });
972
- this.activeFeatures.add("bodySegmentation");
973
- this.debugLog("✅ Body segmentation enabled");
974
- } catch (error) {
975
- this.debugLog("❌ Failed to enable body segmentation:", error);
976
- throw error;
977
- }
978
- }
979
- /**
980
- * Disable body segmentation and cleanup
981
- */
982
- async disableBodySegmentation() {
983
- if (!this.activeFeatures.has("bodySegmentation")) return;
984
- try {
985
- this.debugLog("🔧 Disabling body segmentation...");
986
- this.activeFeatures.delete("bodySegmentation");
987
- this.results.segmentation = null;
988
- const newFeatures = Array.from(this.activeFeatures);
989
- await this.postToCV("config", { features: newFeatures });
990
- this.debugLog("✅ Body segmentation disabled and cleaned up");
991
- } catch (error) {
992
- this.debugLog("❌ Failed to disable body segmentation:", error);
993
- throw error;
994
- }
995
- }
996
- /**
997
- * Process video frame with active CV features
998
- */
999
- async processFrame(bitmap) {
1000
- if (this.processing || this.activeFeatures.size === 0) {
1001
- return;
1002
- }
1003
- this.cvFrameCounter++;
1004
- const shouldProcess = this.shouldProcessFrame();
1005
- if (!shouldProcess) {
1006
- return;
1007
- }
1008
- this.processing = true;
1009
- this.processingStartTime = performance.now();
1010
- this.trackCVFrameRate();
1011
- this.debugLog(`🎬 Processing frame ${this.cvFrameCounter} with features:`, Array.from(this.activeFeatures));
1012
- try {
1013
- const features = Array.from(this.activeFeatures);
1014
- const timestamp = performance.now();
1015
- const processPromise = this.postToCV("process", {
1016
- bitmap,
1017
- timestamp,
1018
- features
1019
- }, [bitmap]);
1020
- const timeoutPromise = new Promise((_, reject) => {
1021
- setTimeout(() => reject(new Error("CV processing timeout")), 500);
1022
- });
1023
- const results = await Promise.race([processPromise, timeoutPromise]);
1024
- if (results.faces && (this.activeFeatures.has("faceDetection") || this.activeFeatures.has("faceMesh"))) {
1025
- this.results.faces = results.faces;
1026
- this.debugLog(`📥 Received ${results.faces.length} face results`);
1027
- }
1028
- if (results.hands && this.activeFeatures.has("handTracking")) {
1029
- this.results.hands = results.hands;
1030
- this.debugLog(`📥 Received ${results.hands.length} hand results`);
1031
- }
1032
- if (results.pose && this.activeFeatures.has("poseDetection")) {
1033
- this.results.pose = results.pose;
1034
- this.debugLog(`📥 Received pose results with ${results.pose.landmarks.length} landmarks`);
1035
- }
1036
- if (results.segmentation && this.activeFeatures.has("bodySegmentation")) {
1037
- this.results.segmentation = results.segmentation;
1038
- this.debugLog(`📥 Received segmentation results ${results.segmentation.width}x${results.segmentation.height}`);
1039
- }
1040
- const processingTime = performance.now() - this.processingStartTime;
1041
- this.processingTimes.push(processingTime);
1042
- if (this.processingTimes.length > 30) {
1043
- this.processingTimes.shift();
1044
- }
1045
- } catch (error) {
1046
- this.debugLog("⚠️ CV processing failed:", error);
1047
- } finally {
1048
- this.processing = false;
1049
- }
1050
- }
1051
- /**
1052
- * Check if current frame should be processed based on CV frame rate mode
1053
- */
1054
- shouldProcessFrame() {
1055
- const divisor = this.getFrameRateDivisor();
1056
- return this.cvFrameCounter % divisor === 0;
1057
- }
1058
- /**
1059
- * Track CV processing frame rate (similar to main core)
1060
- */
1061
- trackCVFrameRate() {
1062
- const now = performance.now();
1063
- if (this.lastCVFrameTime > 0) {
1064
- const deltaTime = now - this.lastCVFrameTime;
1065
- this.cvFrameTimes.push(deltaTime);
1066
- if (this.cvFrameTimes.length > 30) {
1067
- this.cvFrameTimes.shift();
1068
- }
1069
- if (this.cvFrameTimes.length > 0) {
1070
- const avgDeltaTime = this.cvFrameTimes.reduce((a, b) => a + b, 0) / this.cvFrameTimes.length;
1071
- this.actualCVFPS = Math.round(1e3 / avgDeltaTime);
1072
- }
1073
- }
1074
- this.lastCVFrameTime = now;
1075
- }
1076
- /**
1077
- * Get frame rate divisor based on current mode
1078
- */
1079
- getFrameRateDivisor() {
1080
- switch (this.cvFrameRateMode) {
1081
- case "full":
1082
- return 1;
1083
- case "half":
1084
- return 2;
1085
- case "quarter":
1086
- return 4;
1087
- case "eighth":
1088
- return 8;
1089
- default:
1090
- return 4;
1091
- }
1092
- }
1093
- /**
1094
- * Get current CV results
1095
- */
1096
- getResults() {
1097
- return { ...this.results };
1098
- }
1099
- /**
1100
- * Get processing statistics
1101
- */
1102
- getStats() {
1103
- const avgProcessingTime = this.processingTimes.length > 0 ? this.processingTimes.reduce((a, b) => a + b, 0) / this.processingTimes.length : 0;
1104
- const targetFPS = this.sceneTargetFPS / this.getFrameRateDivisor();
1105
- return {
1106
- activeFeatures: Array.from(this.activeFeatures),
1107
- processingTime: avgProcessingTime,
1108
- effectiveFPS: targetFPS,
1109
- actualFPS: this.actualCVFPS,
1110
- // Add actual measured CV FPS
1111
- isProcessing: this.processing
1112
- };
1113
- }
1114
- /**
1115
- * Check WebGL context availability before enabling features
1116
- */
1117
- checkWebGLContextAvailability() {
1118
- try {
1119
- const canvas = new OffscreenCanvas(1, 1);
1120
- const gl = canvas.getContext("webgl");
1121
- if (!gl) {
1122
- this.debugLog("⚠️ WebGL contexts may be exhausted");
1123
- return false;
1124
- }
1125
- const ext = gl.getExtension("WEBGL_lose_context");
1126
- if (ext) ext.loseContext();
1127
- return true;
1128
- } catch (error) {
1129
- this.debugLog("⚠️ WebGL context check failed:", error);
1130
- return false;
1131
- }
1132
- }
1133
- /**
1134
- * Get CV control interface for artist API
1135
- */
1136
- getControlInterface() {
1137
- return {
1138
- enableFaceDetection: (enabled) => {
1139
- if (enabled === false) {
1140
- return this.disableFaceDetection();
1141
- } else {
1142
- return this.enableFaceDetection();
1143
- }
1144
- },
1145
- disableFaceDetection: () => this.disableFaceDetection(),
1146
- enableFaceMesh: (enabled) => {
1147
- if (enabled === false) {
1148
- return this.disableFaceMesh();
1149
- } else {
1150
- return this.enableFaceMesh();
1151
- }
1152
- },
1153
- disableFaceMesh: () => this.disableFaceMesh(),
1154
- enableHandTracking: (enabled) => {
1155
- if (enabled === false) {
1156
- return this.disableHandTracking();
1157
- } else {
1158
- return this.enableHandTracking();
1159
- }
1160
- },
1161
- disableHandTracking: () => this.disableHandTracking(),
1162
- enablePoseDetection: (enabled) => {
1163
- if (enabled === false) {
1164
- return this.disablePoseDetection();
1165
- } else {
1166
- return this.enablePoseDetection();
1167
- }
1168
- },
1169
- disablePoseDetection: () => this.disablePoseDetection(),
1170
- enableBodySegmentation: (enabled) => {
1171
- if (enabled === false) {
1172
- return this.disableBodySegmentation();
1173
- } else {
1174
- return this.enableBodySegmentation();
1175
- }
1176
- },
1177
- disableBodySegmentation: () => this.disableBodySegmentation(),
1178
- getActiveFeatures: () => Array.from(this.activeFeatures),
1179
- isProcessing: () => this.processing,
1180
- getStats: () => this.getStats(),
1181
- getWorkerStatus: () => ({
1182
- healthy: !!this.cvWorker,
1183
- restartCount: this.workerRestartCount,
1184
- maxRestarts: this.maxWorkerRestarts
1185
- }),
1186
- restartWorker: () => this.handleWorkerFailure("Manual restart requested"),
1187
- // WebGL context monitoring
1188
- checkWebGLAvailability: () => this.checkWebGLContextAvailability(),
1189
- getResourceUsage: () => ({
1190
- activeFeatures: this.activeFeatures.size,
1191
- estimatedWebGLContexts: this.activeFeatures.size * 2,
1192
- // ~2 contexts per feature
1193
- webglAvailable: this.checkWebGLContextAvailability()
1194
- })
1195
- };
1196
- }
1197
- /**
1198
- * Cleanup all CV resources
1199
- */
1200
- async cleanup() {
1201
- this.debugLog("🔧 Cleaning up CVSystem...");
1202
- for (const feature of Array.from(this.activeFeatures)) {
1203
- switch (feature) {
1204
- case "faceDetection":
1205
- await this.disableFaceDetection();
1206
- break;
1207
- case "handTracking":
1208
- await this.disableHandTracking();
1209
- break;
1210
- case "poseDetection":
1211
- await this.disablePoseDetection();
1212
- break;
1213
- case "bodySegmentation":
1214
- await this.disableBodySegmentation();
1215
- break;
1216
- }
1217
- }
1218
- if (this.cvWorker) {
1219
- this.cvWorker.terminate();
1220
- this.cvWorker = null;
1221
- }
1222
- this.activeFeatures.clear();
1223
- this.pendingFeatures.clear();
1224
- this.workerRestartCount = 0;
1225
- this.results.faces = [];
1226
- this.results.hands = [];
1227
- this.results.pose = null;
1228
- this.results.segmentation = null;
1229
- this.processing = false;
1230
- this.processingTimes = [];
1231
- this.debugLog("✅ CVSystem cleanup complete");
1232
- }
1233
- }
1234
class VideoSystem {
  // Worker-owned OffscreenCanvas (transferred from host)
  offscreenCanvas = null;
  ctx = null;
  gl = null;
  // CV processing helpers: reusable fallback canvas when createImageBitmap fails
  cvScratchCanvas = null;
  cvScratchContext = null;
  // Debug logging control
  debugMode = false;
  // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
  directGPUMode = false;
  // Stream categorization
  streamType = "additional";
  deviceId;
  /**
   * Enable or disable debug logging (propagated to the CV system)
   */
  setDebugMode(enabled) {
    this.debugMode = enabled;
    if (this.cvSystem) {
      this.cvSystem.setDebugMode(enabled);
    }
  }
  /**
   * Debug logging helper
   */
  debugLog(message, ...args) {
    if (this.debugMode) {
      console.log(message, ...args);
    }
  }
  // Frame processing configuration
  targetFrameRate = 30;
  // Default target FPS for video processing
  lastFrameTime = 0;
  frameInterval = 1e3 / this.targetFrameRate;
  // ms between frames
  // Processing state
  hasLoggedFirstFrame = false;
  frameCount = 0;
  // Video state for artist API
  videoState = {
    isConnected: false,
    currentFrame: null,
    frameWidth: 0,
    frameHeight: 0,
    frameRate: 0,
    frameData: null
  };
  // Phase 11 - CV System Integration
  cvSystem;
  constructor() {
    this.cvSystem = new CVSystem();
  }
  /**
   * Set stream type and optional device ID
   */
  setStreamType(type, deviceId) {
    this.streamType = type;
    this.deviceId = deviceId;
  }
  /**
   * Get stream type
   */
  getStreamType() {
    return this.streamType;
  }
  /**
   * Get device ID (for device streams)
   */
  getDeviceId() {
    return this.deviceId;
  }
  /**
   * Get the video API for inclusion in the viji object
   * In direct GPU mode each ImageBitmap frame is handed out exactly once:
   * the internal slot is nulled so a second call cannot reuse it.
   */
  getVideoAPI() {
    const cvResults = this.cvSystem.getResults();
    const currentFrame = this.videoState.currentFrame;
    if (this.directGPUMode && currentFrame instanceof ImageBitmap) {
      this.videoState.currentFrame = null;
    }
    return {
      isConnected: this.videoState.isConnected,
      currentFrame,
      frameWidth: this.videoState.frameWidth,
      frameHeight: this.videoState.frameHeight,
      frameRate: this.videoState.frameRate,
      getFrameData: () => this.videoState.frameData,
      // CV Results from CVSystem
      faces: cvResults.faces,
      hands: cvResults.hands,
      pose: cvResults.pose,
      segmentation: cvResults.segmentation,
      // CV Control Interface
      cv: this.cvSystem.getControlInterface()
    };
  }
  /**
   * Receive OffscreenCanvas transfer from host
   * Resets any previous connection, acquires a 2D context (required) and a
   * WebGL context (best effort), and marks the video as connected.
   */
  handleCanvasSetup(data) {
    try {
      this.disconnectVideo();
      this.offscreenCanvas = data.offscreenCanvas;
      this.ctx = this.offscreenCanvas.getContext("2d", {
        willReadFrequently: true
        // Optimize for frequent getImageData calls
      });
      if (!this.ctx) {
        throw new Error("Failed to get 2D context from transferred OffscreenCanvas");
      }
      try {
        this.gl = this.offscreenCanvas.getContext("webgl2") || this.offscreenCanvas.getContext("webgl");
      } catch (e) {
        this.debugLog("WebGL not available, using 2D context only");
      }
      this.videoState.isConnected = true;
      this.videoState.currentFrame = this.offscreenCanvas;
      this.videoState.frameWidth = data.width;
      this.videoState.frameHeight = data.height;
      this.frameCount = 0;
      this.hasLoggedFirstFrame = false;
      this.debugLog("✅ OffscreenCanvas received and setup completed (worker-side)", {
        width: data.width,
        height: data.height,
        hasWebGL: !!this.gl,
        targetFrameRate: this.targetFrameRate
      });
      this.debugLog("🎬 CORRECT OffscreenCanvas approach - Worker has full GPU access!");
    } catch (error) {
      console.error("Failed to setup OffscreenCanvas in worker:", error);
      this.disconnectVideo();
    }
  }
  /**
   * Receive ImageBitmap frame and draw to worker's OffscreenCanvas
   * The incoming bitmap is always closed here (either after drawing or when
   * the canvas is not ready) so frames never leak.
   */
  handleFrameUpdate(data) {
    if (!this.offscreenCanvas || !this.ctx) {
      console.warn("🔴 Received frame but OffscreenCanvas not setup");
      data.imageBitmap.close();
      return;
    }
    try {
      if (this.frameCount % 150 === 0) {
        this.debugLog("✅ Worker received ImageBitmap frame:", {
          bitmapSize: `${data.imageBitmap.width}x${data.imageBitmap.height}`,
          canvasSize: `${this.offscreenCanvas.width}x${this.offscreenCanvas.height}`,
          frameCount: this.frameCount,
          timestamp: data.timestamp
        });
      }
      this.ctx.drawImage(data.imageBitmap, 0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
      void this.processCurrentFrame(data.timestamp); // fire-and-forget async CV pass
      data.imageBitmap.close();
      this.frameCount++;
    } catch (error) {
      console.error("🔴 Error processing video frame (worker-side):", error);
    }
  }
  /**
   * Process current frame (called when new frame is drawn)
   * Captures ImageData for the artist API, updates the measured frame rate,
   * and forwards a bitmap of the frame to the CV system.
   */
  async processCurrentFrame(timestamp) {
    if (!this.offscreenCanvas || !this.ctx) {
      return;
    }
    try {
      this.videoState.frameData = this.ctx.getImageData(
        0,
        0,
        this.offscreenCanvas.width,
        this.offscreenCanvas.height
      );
      const deltaTime = timestamp - this.lastFrameTime;
      this.videoState.frameRate = deltaTime > 0 ? 1e3 / deltaTime : 0;
      if (!this.hasLoggedFirstFrame) {
        this.debugLog(`🎯 Worker-side OffscreenCanvas processing active: ${this.videoState.frameRate.toFixed(1)} FPS (${this.offscreenCanvas.width}x${this.offscreenCanvas.height})`);
        this.debugLog("✅ Full GPU access available for custom effects and CV analysis");
        this.hasLoggedFirstFrame = true;
      }
      if (this.videoState.frameData) {
        try {
          const bitmap = await createImageBitmap(this.videoState.frameData);
          // Intentionally not awaited: CV runs concurrently with video updates.
          this.cvSystem.processFrame(bitmap);
        } catch (bitmapError) {
          this.debugLog("⚠️ createImageBitmap failed – falling back to reusable CV canvas:", bitmapError);
          // (Re)create the scratch canvas only when size changes or it is missing.
          if (!this.cvScratchCanvas || !this.cvScratchContext || this.cvScratchCanvas.width !== this.videoState.frameData.width || this.cvScratchCanvas.height !== this.videoState.frameData.height) {
            this.cvScratchCanvas = new OffscreenCanvas(
              this.videoState.frameData.width,
              this.videoState.frameData.height
            );
            this.cvScratchContext = this.cvScratchCanvas.getContext("2d");
            if (!this.cvScratchContext) {
              throw new Error("Failed to get 2D context for CV fallback canvas");
            }
          }
          this.cvScratchContext.putImageData(this.videoState.frameData, 0, 0);
          const fallbackBitmap = this.cvScratchCanvas.transferToImageBitmap();
          this.cvSystem.processFrame(fallbackBitmap);
        }
      }
      this.lastFrameTime = timestamp;
    } catch (error) {
      console.error("Error processing video frame (worker-side):", error);
    }
  }
  /**
   * Handle video configuration updates (including disconnection and resize)
   */
  handleVideoConfigUpdate(data) {
    try {
      if (data.disconnect) {
        this.disconnectVideo();
        return;
      }
      if (data.width && data.height && this.offscreenCanvas) {
        this.resizeCanvas(data.width, data.height);
      }
      if (data.targetFrameRate) {
        this.updateProcessingConfig(data.targetFrameRate);
      }
      if (data.cvFrameRate) {
        this.updateCVFrameRate(data.cvFrameRate);
      }
    } catch (error) {
      console.error("Error handling video config update:", error);
    }
  }
  /**
   * Resize the OffscreenCanvas (when video dimensions change)
   */
  resizeCanvas(width, height) {
    if (!this.offscreenCanvas) return;
    try {
      this.offscreenCanvas.width = width;
      this.offscreenCanvas.height = height;
      this.videoState.frameWidth = width;
      this.videoState.frameHeight = height;
      if (this.gl) {
        this.gl.viewport(0, 0, width, height);
      }
      this.debugLog(`📐 OffscreenCanvas resized to ${width}x${height} (worker-side)`);
    } catch (error) {
      console.error("Error resizing OffscreenCanvas:", error);
    }
  }
  /**
   * Disconnect video and clean up resources
   * Clears the canvas, drops all contexts/scratch buffers, and resets state.
   */
  disconnectVideo() {
    if (this.offscreenCanvas && this.ctx) {
      this.ctx.clearRect(0, 0, this.offscreenCanvas.width, this.offscreenCanvas.height);
      this.debugLog("🧹 Cleared OffscreenCanvas on disconnect");
    }
    this.offscreenCanvas = null;
    this.ctx = null;
    this.gl = null;
    this.videoState.isConnected = false;
    this.videoState.currentFrame = null;
    this.videoState.frameWidth = 0;
    this.videoState.frameHeight = 0;
    this.videoState.frameRate = 0;
    this.videoState.frameData = null;
    this.cvScratchCanvas = null;
    this.cvScratchContext = null;
    this.hasLoggedFirstFrame = false;
    this.frameCount = 0;
    this.debugLog("Video disconnected (worker-side)");
  }
  /**
   * Update video processing configuration
   * Target frame rate is clamped to the [1, 60] range.
   */
  updateProcessingConfig(targetFrameRate) {
    this.targetFrameRate = Math.max(1, Math.min(60, targetFrameRate));
    this.frameInterval = 1e3 / this.targetFrameRate;
    this.debugLog(`Video processing frame rate updated to ${this.targetFrameRate} FPS (worker-side)`);
  }
  /**
   * Phase 11 - Update CV frame rate configuration
   */
  updateCVFrameRate(cvFrameRate) {
    this.cvSystem.updateCVFrameRate(cvFrameRate.mode, cvFrameRate.sceneTargetFPS);
    this.debugLog(`CV frame rate updated to ${cvFrameRate.mode} of ${cvFrameRate.sceneTargetFPS} FPS (worker-side)`);
  }
  /**
   * Reset all video state (called when loading new scene)
   */
  resetVideoState() {
    this.disconnectVideo();
    if (this.cvSystem) {
      // Intentionally not awaited: teardown proceeds in the background.
      this.cvSystem.cleanup();
    }
  }
  /**
   * Get current processing configuration
   */
  getProcessingConfig() {
    return {
      targetFrameRate: this.targetFrameRate,
      frameInterval: this.frameInterval,
      frameCount: this.frameCount
    };
  }
  /**
   * Get WebGL context for advanced effects (if available)
   */
  getWebGLContext() {
    return this.gl;
  }
  /**
   * Artists can access the OffscreenCanvas directly for custom effects
   */
  getCanvasForArtistEffects() {
    return this.offscreenCanvas;
  }
  /**
   * Get CV processing statistics
   */
  getCVStats() {
    return this.cvSystem.getStats();
  }
  /**
   * Initialize for direct frame injection (no MediaStream)
   * Enables GPU-only mode for zero-copy pipeline (ImageBitmaps are pre-flipped at capture)
   */
  initializeForDirectFrames(consumerRendererType) {
    this.disconnectVideo();
    this.directGPUMode = consumerRendererType === "shader";
    if (this.directGPUMode) {
      this.debugLog("VideoSystem initialized in direct GPU mode (zero-copy, pre-flipped ImageBitmaps)");
    } else {
      this.offscreenCanvas = new OffscreenCanvas(1920, 1080);
      this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
      if (!this.ctx) {
        throw new Error("Failed to get 2D context for direct frames");
      }
      this.videoState.currentFrame = this.offscreenCanvas;
      this.debugLog("VideoSystem initialized with canvas (P5/Canvas consumer)");
    }
    this.videoState.isConnected = false;
  }
  /**
   * Handle directly injected frame (zero-copy)
   * GPU mode adopts the bitmap (ownership transfers here); canvas mode draws
   * it into the OffscreenCanvas and closes it.
   */
  handleDirectFrame(data) {
    if (this.directGPUMode) {
      if (this.videoState.currentFrame instanceof ImageBitmap) {
        this.videoState.currentFrame.close();
      }
      this.videoState.currentFrame = data.imageBitmap;
      this.videoState.frameWidth = data.imageBitmap.width;
      this.videoState.frameHeight = data.imageBitmap.height;
      this.videoState.isConnected = true;
    } else {
      if (!this.offscreenCanvas || !this.ctx) {
        this.debugLog("[Compositor] Initializing canvas for direct frames");
        this.offscreenCanvas = new OffscreenCanvas(data.imageBitmap.width, data.imageBitmap.height);
        this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
      }
      if (!this.ctx) {
        // Fix: getContext can return null; previously this fell through to
        // drawImage and threw an opaque TypeError while leaking the bitmap.
        console.error("🔴 Failed to get 2D context for direct frames");
        data.imageBitmap.close();
        return;
      }
      if (this.offscreenCanvas.width !== data.imageBitmap.width || this.offscreenCanvas.height !== data.imageBitmap.height) {
        this.offscreenCanvas.width = data.imageBitmap.width;
        this.offscreenCanvas.height = data.imageBitmap.height;
      }
      this.ctx.drawImage(data.imageBitmap, 0, 0);
      this.videoState.currentFrame = this.offscreenCanvas;
      this.videoState.frameWidth = data.imageBitmap.width;
      this.videoState.frameHeight = data.imageBitmap.height;
      this.videoState.isConnected = true;
      void this.processCurrentFrame(data.timestamp); // fire-and-forget async CV pass
      data.imageBitmap.close();
    }
  }
}
1610
- class P5WorkerAdapter {
1611
- constructor(offscreenCanvas, _vijiAPI, sceneCode) {
1612
- this.offscreenCanvas = offscreenCanvas;
1613
- this.setupFn = sceneCode.setup || null;
1614
- this.renderFn = sceneCode.render;
1615
- this.installMinimalShims();
1616
- }
1617
  // Live p5 instance, assigned inside init()
  p5Instance = null;
  // Artist-provided setup/render callbacks (set in the constructor)
  setupFn = null;
  renderFn = null;
  // True once P5's internal setup (createCanvas) has completed
  p5InternalSetupComplete = false;
  // True once the artist's setup function has run
  artistSetupComplete = false;
  // Dynamically imported p5 constructor
  p5Class = null;
  // Cache for converted P5.Image objects
  imageParameterCache = /* @__PURE__ */ new Map();
  // Track if P5.js's main canvas has been created
  mainCanvasCreated = false;
1627
  /**
   * Initialize P5 instance after P5.js library is loaded
   * This must be called after the P5 class is available
   * Dynamically imports p5 from esm.sh (NOTE(review): requires network access
   * at runtime), creates an instance-mode sketch, and resolves once P5's
   * internal setup (canvas creation) has completed.
   */
  async init() {
    try {
      const p5Module = await import("https://esm.sh/p5@1.9.4");
      this.p5Class = p5Module.default || p5Module;
      const setupPromise = new Promise((resolve) => {
        new this.p5Class((p) => {
          this.p5Instance = p;
          p.setup = () => {
            p.createCanvas(this.offscreenCanvas.width, this.offscreenCanvas.height);
            // Rendering is driven externally, not by P5's own draw loop.
            p.noLoop();
            this.p5InternalSetupComplete = true;
            resolve();
          };
          p.draw = () => {
          };
          // In a worker P5 may not invoke setup() on its own (no DOM load
          // events), so trigger it manually on the next tick as a fallback.
          setTimeout(() => {
            if (p.setup && typeof p.setup === "function") {
              try {
                p.setup();
              } catch (setupError) {
                console.error("P5 setup failed:", setupError);
                // Resolve anyway so init() cannot hang forever on a setup error.
                resolve();
              }
            }
          }, 0);
        });
      });
      await setupPromise;
    } catch (error) {
      console.error("Failed to initialize P5.js:", error);
      throw error;
    }
  }
1664
  /**
   * Install minimal DOM shims that P5.js needs for rendering
   * Creates stub `document`, `window`, `navigator`, `screen` and
   * `HTMLCanvasElement` globals in the worker, just enough for P5's
   * instance-mode bootstrap. Existing globals are never overwritten
   * (every section is guarded by a typeof check).
   */
  installMinimalShims() {
    const self2 = globalThis;
    if (typeof self2.document === "undefined") {
      // Style objects are written to but never read back by us, so a
      // permissive Proxy that accepts any property suffices.
      const createStyleProxy = () => new Proxy({}, {
        get: () => "",
        set: () => true
      });
      const bodyElement = {
        style: createStyleProxy(),
        appendChild: () => {
        },
        removeChild: () => {
        },
        children: [],
        childNodes: [],
        firstChild: null,
        lastChild: null,
        parentNode: null,
        ownerDocument: void 0,
        // Will be set after document is created
        setAttribute: () => {
        },
        getAttribute: () => null,
        addEventListener: () => {
        },
        removeEventListener: () => {
        },
        tagName: "BODY"
      };
      self2.document = {
        createElement: (tag) => {
          if (tag === "canvas") {
            let canvas;
            if (!this.mainCanvasCreated) {
              // First canvas request becomes P5's main canvas: hand over the
              // worker's OffscreenCanvas so P5 renders straight into it.
              canvas = this.offscreenCanvas;
              this.mainCanvasCreated = true;
            } else {
              // Subsequent canvases (e.g. createGraphics) get fresh buffers.
              canvas = new OffscreenCanvas(300, 300);
            }
            canvas.style = createStyleProxy();
            canvas.dataset = new Proxy({}, {
              get: () => void 0,
              set: () => true
            });
            canvas.classList = {
              add: () => {
              },
              remove: () => {
              },
              contains: () => false,
              toggle: () => false
            };
            canvas.getBoundingClientRect = () => ({
              left: 0,
              top: 0,
              width: canvas.width,
              height: canvas.height
            });
            return canvas;
          }
          // Non-canvas elements: inert stub with the attributes P5 touches.
          return {
            style: createStyleProxy(),
            appendChild: () => {
            },
            removeChild: () => {
            },
            setAttribute: () => {
            },
            getAttribute: () => null,
            tagName: tag.toUpperCase(),
            addEventListener: () => {
            },
            removeEventListener: () => {
            }
          };
        },
        createElementNS: (_ns, tag) => {
          return self2.document.createElement(tag);
        },
        body: bodyElement,
        documentElement: {
          style: createStyleProxy(),
          children: [],
          childNodes: []
        },
        getElementById: () => null,
        querySelector: () => null,
        querySelectorAll: () => [],
        getElementsByTagName: (tagName) => {
          // P5 looks up <main> to attach its canvas; reuse the body stub.
          if (tagName.toLowerCase() === "main") {
            return [bodyElement];
          }
          return [];
        },
        addEventListener: () => {
        },
        removeEventListener: () => {
        },
        hasFocus: () => true
        // P5.js checks this for accessibility features
      };
      bodyElement.ownerDocument = self2.document;
    }
    if (typeof self2.window === "undefined") {
      self2.window = {
        devicePixelRatio: 1,
        innerWidth: this.offscreenCanvas.width,
        innerHeight: this.offscreenCanvas.height,
        addEventListener: () => {
        },
        removeEventListener: () => {
        },
        // rAF is stubbed out: frames are driven externally, not by P5's loop.
        requestAnimationFrame: (_callback) => {
          return 0;
        },
        cancelAnimationFrame: () => {
        },
        setTimeout: self2.setTimeout.bind(self2),
        clearTimeout: self2.clearTimeout.bind(self2),
        setInterval: self2.setInterval.bind(self2),
        clearInterval: self2.clearInterval.bind(self2),
        performance: self2.performance,
        console: self2.console,
        Math: self2.Math,
        Date: self2.Date,
        Array: self2.Array,
        Object: self2.Object
      };
    }
    if (typeof self2.navigator === "undefined") {
      self2.navigator = {
        userAgent: "Viji-Worker-P5",
        platform: "Worker",
        language: "en-US"
      };
    }
    if (typeof self2.screen === "undefined") {
      self2.screen = {
        width: this.offscreenCanvas.width,
        height: this.offscreenCanvas.height,
        availWidth: this.offscreenCanvas.width,
        availHeight: this.offscreenCanvas.height,
        colorDepth: 24,
        pixelDepth: 24
      };
    }
    if (typeof self2.HTMLCanvasElement === "undefined") {
      // Make OffscreenCanvas pass `instanceof HTMLCanvasElement` checks in P5.
      self2.HTMLCanvasElement = function() {
      };
      Object.setPrototypeOf(OffscreenCanvas.prototype, self2.HTMLCanvasElement.prototype);
    }
  }
1819
- /**
1820
- * Convert ImageBitmap to a P5.js-compatible image object (with caching)
1821
- * Returns an object that mimics P5.Image structure for P5.js's image() function
1822
- */
1823
- getOrCreateP5Image(cacheKey, source) {
1824
- if (!this.p5Instance) return null;
1825
- const cached = this.imageParameterCache.get(cacheKey);
1826
- if (cached && cached.source === source) {
1827
- return cached.p5Image;
1828
- }
1829
- try {
1830
- const offscreenCanvas = new OffscreenCanvas(source.width, source.height);
1831
- const ctx = offscreenCanvas.getContext("2d");
1832
- if (!ctx) {
1833
- throw new Error("Failed to get 2d context from OffscreenCanvas");
1834
- }
1835
- ctx.drawImage(source, 0, 0);
1836
- const p5ImageWrapper = {
1837
- canvas: offscreenCanvas,
1838
- // P5.js looks for img.canvas || img.elt
1839
- elt: offscreenCanvas,
1840
- // Fallback for compatibility
1841
- width: source.width,
1842
- // Logical width
1843
- height: source.height
1844
- // Logical height
1845
- };
1846
- this.imageParameterCache.set(cacheKey, { source, p5Image: p5ImageWrapper });
1847
- return p5ImageWrapper;
1848
- } catch (error) {
1849
- console.warn("Failed to convert image to P5-compatible object:", error);
1850
- return null;
1851
- }
1852
- }
1853
- /**
1854
- * Wrap video frames in P5.js-compatible format
1855
- * P5.js expects images to have {canvas, elt, width, height} structure
1856
- * This wrapping is done per-frame for P5 scenes only, keeping the artist API unchanged
1857
- */
1858
- wrapVideoFramesForP5(vijiAPI) {
1859
- if (vijiAPI.video?.currentFrame instanceof OffscreenCanvas) {
1860
- const canvas = vijiAPI.video.currentFrame;
1861
- vijiAPI.video.currentFrame = {
1862
- canvas,
1863
- elt: canvas,
1864
- width: canvas.width,
1865
- height: canvas.height
1866
- };
1867
- } else if (vijiAPI.video?.currentFrame instanceof ImageBitmap) {
1868
- const bitmap = vijiAPI.video.currentFrame;
1869
- vijiAPI.video.currentFrame = {
1870
- elt: bitmap,
1871
- width: bitmap.width,
1872
- height: bitmap.height
1873
- };
1874
- }
1875
- if (Array.isArray(vijiAPI.streams)) {
1876
- for (const stream of vijiAPI.streams) {
1877
- if (stream?.currentFrame instanceof OffscreenCanvas) {
1878
- const canvas = stream.currentFrame;
1879
- stream.currentFrame = {
1880
- canvas,
1881
- elt: canvas,
1882
- width: canvas.width,
1883
- height: canvas.height
1884
- };
1885
- } else if (stream?.currentFrame instanceof ImageBitmap) {
1886
- const bitmap = stream.currentFrame;
1887
- stream.currentFrame = {
1888
- elt: bitmap,
1889
- width: bitmap.width,
1890
- height: bitmap.height
1891
- };
1892
- }
1893
- }
1894
- }
1895
- }
1896
- /**
1897
- * Add .p5 property to image parameters for P5.js-specific rendering
1898
- * This allows artists to use p5.image() while keeping .value for native canvas API
1899
- * @param parameterObjects Map of parameter name to parameter object from ParameterSystem
1900
- */
1901
- addP5PropertyToImageParameters(parameterObjects) {
1902
- if (!this.p5Instance) return;
1903
- const isImageLike = (value) => {
1904
- return value instanceof ImageBitmap || value instanceof OffscreenCanvas || value && typeof value === "object" && "width" in value && "height" in value;
1905
- };
1906
- for (const [name, param] of parameterObjects) {
1907
- try {
1908
- if (param && typeof param === "object" && "value" in param) {
1909
- const value = param.value;
1910
- if (value && isImageLike(value) && !("p5" in param)) {
1911
- Object.defineProperty(param, "p5", {
1912
- get: () => this.getOrCreateP5Image(name, param.value),
1913
- enumerable: true,
1914
- configurable: true
1915
- });
1916
- }
1917
- }
1918
- } catch (error) {
1919
- console.warn(`Failed to add .p5 property to parameter '${name}':`, error);
1920
- continue;
1921
- }
1922
- }
1923
- }
1924
- /**
1925
- * Execute one frame of the P5 scene
1926
- * Called by Viji's render loop
1927
- * @param vijiAPI The Viji API object passed to artist code
1928
- * @param parameterObjects Map of parameter objects from ParameterSystem
1929
- */
1930
- tick(vijiAPI, parameterObjects) {
1931
- if (!this.p5Instance || !this.p5InternalSetupComplete) {
1932
- return;
1933
- }
1934
- try {
1935
- this.addP5PropertyToImageParameters(parameterObjects);
1936
- this.wrapVideoFramesForP5(vijiAPI);
1937
- if (!this.artistSetupComplete && this.setupFn) {
1938
- this.setupFn(vijiAPI, this.p5Instance);
1939
- this.artistSetupComplete = true;
1940
- }
1941
- if (this.p5Instance._setProperty) {
1942
- this.p5Instance._setProperty("frameCount", this.p5Instance.frameCount + 1);
1943
- }
1944
- if (this.renderFn) {
1945
- this.renderFn(vijiAPI, this.p5Instance);
1946
- }
1947
- } catch (error) {
1948
- console.error("P5 render error:", error);
1949
- throw error;
1950
- }
1951
- }
1952
- /**
1953
- * Handle canvas resize
1954
- */
1955
- resize(width, height) {
1956
- if (!this.p5Instance) return;
1957
- this.p5Instance._setProperty("width", width);
1958
- this.p5Instance._setProperty("height", height);
1959
- this.p5Instance._setProperty("_width", width);
1960
- this.p5Instance._setProperty("_height", height);
1961
- if (this.p5Instance._renderer) {
1962
- this.p5Instance._renderer.width = width;
1963
- this.p5Instance._renderer.height = height;
1964
- }
1965
- if (typeof this.p5Instance.resizeCanvas === "function") {
1966
- try {
1967
- this.p5Instance.resizeCanvas(width, height, true);
1968
- } catch (error) {
1969
- console.warn("P5 resize warning:", error);
1970
- }
1971
- }
1972
- }
1973
- /**
1974
- * Cleanup P5 instance
1975
- */
1976
- destroy() {
1977
- if (this.p5Instance) {
1978
- try {
1979
- if (typeof this.p5Instance.remove === "function") {
1980
- this.p5Instance.remove();
1981
- }
1982
- } catch (error) {
1983
- console.warn("P5 cleanup warning:", error);
1984
- }
1985
- this.p5Instance = null;
1986
- }
1987
- this.setupFn = null;
1988
- this.renderFn = null;
1989
- this.p5InternalSetupComplete = false;
1990
- this.artistSetupComplete = false;
1991
- }
1992
- }
1993
class ShaderParameterParser {
  /**
   * Parse all parameter declarations of the form
   * `// @viji-<type>:<uniformName> key:value ...` from shader code.
   * Declarations that fail to parse or validate are skipped with a console
   * warning instead of aborting the whole parse.
   * @param {string} shaderCode Full GLSL source to scan.
   * @returns {Array<Object>} Parsed parameter descriptors
   *          ({type, uniformName, label, default, config}).
   */
  static parseParameters(shaderCode) {
    const parameters = [];
    for (const line of shaderCode.split("\n")) {
      const match = line.trim().match(/\/\/\s*@viji-(\w+):(\w+)\s+(.+)/);
      if (!match) continue;
      const [, type, uniformName, configStr] = match;
      try {
        const config = this.parseKeyValuePairs(configStr);
        const param = {
          type,
          uniformName,
          // validateParameter requires config.label, so this fallback only
          // matters for callers that bypass validation.
          label: config.label || uniformName,
          default: config.default,
          config
        };
        this.validateParameter(param);
        parameters.push(param);
      } catch (error) {
        console.warn(`Failed to parse shader parameter: ${line}`, error);
      }
    }
    return parameters;
  }
  /**
   * Parse key:value pairs from a declaration's configuration string.
   * Values may be quoted strings, bracketed arrays, hex colors
   * (#rgb or #rrggbb), or bare whitespace-free tokens.
   * @param {string} configStr e.g. `label:"Speed" default:1 min:0 max:10`
   * @returns {Object} Config keys mapped to parsed values.
   */
  static parseKeyValuePairs(configStr) {
    const config = {};
    const keyValueRegex = /(\w+):((?:"[^"]*"|\[[^\]]*\]|#[0-9a-fA-F]{6}|#[0-9a-fA-F]{3}|[^\s]+))/g;
    let match;
    while ((match = keyValueRegex.exec(configStr)) !== null) {
      const [, key, value] = match;
      config[key] = this.parseValue(value);
    }
    return config;
  }
  /**
   * Parse an individual value from its string form. Order matters:
   * quoted string, array, hex color, boolean, number, then raw string.
   * Note: parseFloat is lenient ("1px" parses as 1); kept for backward
   * compatibility with existing shaders.
   * @param {string} value Raw value token.
   * @returns {*} Parsed value.
   */
  static parseValue(value) {
    if (value.startsWith('"') && value.endsWith('"')) {
      return value.slice(1, -1);
    }
    if (value.startsWith("[") && value.endsWith("]")) {
      try {
        return JSON.parse(value);
      } catch {
        // Fallback for non-JSON arrays like [a, b]: split and parse items.
        const items = value.slice(1, -1).split(",").map((s) => s.trim());
        return items.map((item) => {
          if (item.startsWith('"') && item.endsWith('"')) {
            return item.slice(1, -1);
          }
          const num2 = parseFloat(item);
          return isNaN(num2) ? item : num2;
        });
      }
    }
    if (value.startsWith("#")) {
      return value; // hex colors stay strings
    }
    if (value === "true") return true;
    if (value === "false") return false;
    const num = parseFloat(value);
    if (!isNaN(num)) return num;
    return value;
  }
  /**
   * Validate a parsed parameter definition.
   * @param {Object} param Parameter descriptor from parseParameters.
   * @throws {Error} with a descriptive message on any violation.
   */
  static validateParameter(param) {
    if (!param.type) {
      throw new Error("Parameter type is required");
    }
    if (!param.uniformName) {
      throw new Error("Parameter uniformName is required");
    }
    if (!param.config.label) {
      throw new Error(`Parameter ${param.uniformName} missing required 'label' key`);
    }
    switch (param.type) {
      case "slider":
      case "number":
        if (param.config.default === void 0) {
          throw new Error(`Parameter ${param.uniformName} of type ${param.type} missing required 'default' key`);
        }
        break;
      case "color":
        if (param.config.default === void 0) {
          throw new Error(`Parameter ${param.uniformName} of type 'color' missing required 'default' key`);
        }
        // Guard the type before calling startsWith: a non-string default
        // (e.g. `default:1`) previously crashed with a TypeError instead of
        // the intended descriptive error.
        if (typeof param.config.default !== "string" || !param.config.default.startsWith("#")) {
          throw new Error(`Parameter ${param.uniformName} of type 'color' default must be hex color (e.g., #ff0000)`);
        }
        break;
      case "toggle":
        if (param.config.default === void 0) {
          throw new Error(`Parameter ${param.uniformName} of type 'toggle' missing required 'default' key`);
        }
        if (typeof param.config.default !== "boolean") {
          throw new Error(`Parameter ${param.uniformName} of type 'toggle' default must be boolean (true or false)`);
        }
        break;
      case "select":
        if (param.config.default === void 0) {
          throw new Error(`Parameter ${param.uniformName} of type 'select' missing required 'default' key`);
        }
        if (!param.config.options || !Array.isArray(param.config.options)) {
          throw new Error(`Parameter ${param.uniformName} of type 'select' missing required 'options' key (array)`);
        }
        break;
      case "image":
        break; // image parameters need no default
      default:
        // Unknown types are tolerated (only warned) so newer declarations
        // do not break older runtimes.
        console.warn(`Unknown parameter type: ${param.type}`);
    }
    if (param.uniformName.startsWith("u_")) {
      console.warn(`Parameter name "${param.uniformName}" uses reserved prefix "u_". Consider renaming to avoid conflicts with built-in uniforms.`);
    }
  }
  /**
   * Generate the GLSL uniform declaration for a parameter.
   * @param {Object} param Parameter descriptor.
   * @returns {string} e.g. `uniform float speed;`
   */
  static generateUniformDeclaration(param) {
    switch (param.type) {
      case "slider":
      case "number":
        return `uniform float ${param.uniformName};`;
      case "color":
        return `uniform vec3 ${param.uniformName};`;
      case "toggle":
        return `uniform bool ${param.uniformName};`;
      case "select":
        return `uniform int ${param.uniformName};`;
      case "image":
        return `uniform sampler2D ${param.uniformName};`;
      default:
        return `// Unknown parameter type: ${param.type}`;
    }
  }
}
2140
- class ShaderWorkerAdapter {
2141
- constructor(offscreenCanvas, _vijiAPI, shaderCode) {
2142
- this.shaderCode = shaderCode;
2143
- this.glslVersion = this.detectGLSLVersion(shaderCode);
2144
- this.backbufferEnabled = shaderCode.includes("backbuffer");
2145
- if (this.glslVersion === "glsl300") {
2146
- const gl = offscreenCanvas.getContext("webgl2");
2147
- if (!gl) {
2148
- throw new Error("WebGL 2 not supported. Use GLSL ES 1.00 syntax instead.");
2149
- }
2150
- this.gl = gl;
2151
- } else {
2152
- const gl = offscreenCanvas.getContext("webgl");
2153
- if (!gl) {
2154
- throw new Error("WebGL not supported");
2155
- }
2156
- this.gl = gl;
2157
- }
2158
- }
2159
static MAX_STREAMS = 8;
// Maximum number of compositor input streams
static MAX_EXTERNAL_DEVICES = 8;
// Maximum number of external devices
static MAX_DEVICE_VIDEOS = 8;
// Maximum number of device video streams
// WebGL rendering context (WebGL1 or WebGL2, chosen in the constructor)
gl;
// Linked shader program; null until compileAndLinkShader() succeeds
program = null;
// uniform name -> location, filled by cacheUniformLocations()
uniformLocations = /* @__PURE__ */ new Map();
// uniform name -> texture unit index, assigned by reserveTextureUnits()
textureUnits = /* @__PURE__ */ new Map();
// Next free texture unit to hand out
nextTextureUnit = 0;
textures = /* @__PURE__ */ new Map();
// Fullscreen quad (triangle-strip vertex buffer)
quadBuffer = null;
// Parameter definitions parsed from @viji-* declarations
parameters = [];
// GLSL version detection ("glsl100" or "glsl300")
glslVersion = "glsl100";
// Audio FFT texture
audioFFTTexture = null;
videoTexture = null;
segmentationTexture = null;
// Multi-stream textures
streamTextures = [];
// Device video textures (fixed-size slots, one per device)
deviceTextures = new Array(ShaderWorkerAdapter.MAX_DEVICE_VIDEOS).fill(null);
// Backbuffer support (ping-pong framebuffers); only populated when
// backbufferEnabled is true
backbufferFramebuffer = null;
backbufferTexture = null;
currentFramebuffer = null;
currentTexture = null;
backbufferEnabled = false;
2191
/**
 * Initialize the shader adapter.
 * Order matters: parameters must be parsed before injectUniforms() (it emits
 * their declarations), the program must be linked before uniform locations
 * can be cached, and backbuffer framebuffers are only created when the
 * shader opted into feedback. Errors are logged and rethrown.
 * @throws {Error} when shader compilation/linking or setup fails.
 */
async init() {
  try {
    this.parameters = ShaderParameterParser.parseParameters(this.shaderCode);
    this.createFullscreenQuad();
    const processedCode = this.injectUniforms(this.shaderCode);
    this.compileAndLinkShader(processedCode);
    this.cacheUniformLocations();
    this.reserveTextureUnits();
    if (this.backbufferEnabled) {
      this.createBackbufferFramebuffers();
    }
  } catch (error) {
    console.error("Failed to initialize ShaderWorkerAdapter:", error);
    throw error;
  }
}
2210
- /**
2211
- * Detect GLSL version from shader code
2212
- */
2213
- detectGLSLVersion(code) {
2214
- return code.includes("#version 300") ? "glsl300" : "glsl100";
2215
- }
2216
- /**
2217
- * Create fullscreen quad geometry
2218
- */
2219
- createFullscreenQuad() {
2220
- const vertices = new Float32Array([
2221
- -1,
2222
- -1,
2223
- // Bottom-left
2224
- 1,
2225
- -1,
2226
- // Bottom-right
2227
- -1,
2228
- 1,
2229
- // Top-left
2230
- 1,
2231
- 1
2232
- // Top-right
2233
- ]);
2234
- this.quadBuffer = this.gl.createBuffer();
2235
- this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.quadBuffer);
2236
- this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);
2237
- }
2238
/**
 * Inject built-in and parameter uniforms into shader code.
 * Steps: strip any #version line (re-prepended at the end), build a block
 * of precision/extension directives plus uniform declarations, and splice
 * it in at the position chosen by findInjectionPoint(). Also warns when the
 * shader uses fwidth() on GLSL 1.00 without OES_standard_derivatives.
 * @param artistCode Raw artist shader source.
 * @returns Processed source ready for compilation.
 */
injectUniforms(artistCode) {
  let versionLine = "";
  let codeWithoutVersion = artistCode;
  const lines = artistCode.split("\n");
  // Remove the first #version directive; it must stay the very first line
  // of the final source, before any injected block.
  for (let i = 0; i < lines.length; i++) {
    const trimmed = lines[i].trim();
    if (trimmed.startsWith("#version")) {
      versionLine = trimmed;
      lines[i] = "";
      codeWithoutVersion = lines.join("\n");
      break;
    }
  }
  const injectionPoint = this.findInjectionPoint(codeWithoutVersion);
  const builtInUniforms = this.getBuiltInUniforms();
  const parameterUniforms = this.parameters.map((p) => ShaderParameterParser.generateUniformDeclaration(p)).join("\n");
  const usesFwidth = artistCode.includes("fwidth");
  if (usesFwidth && this.glslVersion === "glsl100") {
    // fwidth() needs OES_standard_derivatives on WebGL1.
    const ext = this.gl.getExtension("OES_standard_derivatives");
    if (!ext) {
      console.warn("Shader uses fwidth() but OES_standard_derivatives extension is not supported. Shader may not compile.");
    }
  }
  const parts = [];
  if (usesFwidth && this.glslVersion === "glsl100") {
    parts.push("#extension GL_OES_standard_derivatives : enable");
  }
  // Precision qualifier: guarded by #ifdef GL_ES only for GLSL 1.00.
  if (this.glslVersion === "glsl100") {
    parts.push("");
    parts.push("#ifdef GL_ES");
    parts.push("precision mediump float;");
    parts.push("#endif");
  } else {
    parts.push("");
    parts.push("precision mediump float;");
  }
  parts.push("");
  parts.push("// ===== VIJI AUTO-INJECTED UNIFORMS =====");
  parts.push("// Built-in uniforms (auto-provided)");
  parts.push(builtInUniforms);
  parts.push("");
  parts.push("// Parameter uniforms (from @viji-* declarations)");
  parts.push(parameterUniforms);
  parts.push("");
  parts.push("// ===== ARTIST CODE =====");
  const uniformBlock = parts.join("\n");
  const codeWithUniforms = codeWithoutVersion.slice(0, injectionPoint) + "\n" + uniformBlock + "\n" + codeWithoutVersion.slice(injectionPoint);
  const finalCode = versionLine ? versionLine + "\n" + codeWithUniforms : codeWithUniforms;
  console.log("=== INJECTED SHADER CODE (first 50 lines) ===");
  console.log(finalCode.split("\n").slice(0, 50).join("\n"));
  console.log("=== END INJECTED CODE ===");
  return finalCode;
}
2294
/**
 * Find where to inject extensions and uniforms.
 * Extensions must come after #version but before any code.
 *
 * Strategy:
 * 1. If #version exists, inject right after it
 * 2. Otherwise, skip ALL comments (single and multi-line) and inject before first code
 *
 * NOTE(review): step 2 is a line-based heuristic — a line that mixes code
 * with an inline block comment (e.g. `foo(); /* c *​/`) can be
 * misclassified; confirm this is acceptable for artist shaders.
 * @param code Shader source (without #version removed is fine; it is handled).
 * @returns Character offset at which to splice the injected block.
 */
findInjectionPoint(code) {
  const lines = code.split("\n");
  // Case 1: #version present -> inject immediately after that line.
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();
    if (line.startsWith("#version")) {
      return this.getLineEndPosition(code, i);
    }
  }
  // Case 2: skip leading blank/comment lines to find the first code line.
  let inMultiLineComment = false;
  let firstCodeLine = 0;
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();
    if (line.includes("/*")) {
      inMultiLineComment = true;
    }
    if (line.includes("*/")) {
      inMultiLineComment = false;
      firstCodeLine = i + 1;
      continue;
    }
    if (inMultiLineComment) {
      continue;
    }
    if (line === "" || line.startsWith("//")) {
      firstCodeLine = i + 1;
      continue;
    }
    break;
  }
  if (firstCodeLine > 0 && firstCodeLine < lines.length) {
    return this.getLineEndPosition(code, firstCodeLine - 1);
  }
  return 0;
}
2336
- /**
2337
- * Get byte position of end of line N
2338
- */
2339
- getLineEndPosition(code, lineNumber) {
2340
- const lines = code.split("\n");
2341
- let position = 0;
2342
- for (let i = 0; i <= lineNumber && i < lines.length; i++) {
2343
- position += lines[i].length + 1;
2344
- }
2345
- return position;
2346
- }
2347
/**
 * Get built-in uniform declarations as one GLSL source block.
 * The returned text is injected verbatim into the artist's shader by
 * injectUniforms(), so the `//` comments below are GLSL comments that end
 * up in the compiled source, not JS comments. The backbuffer sampler is
 * declared only when backbufferEnabled is true, mirroring
 * reserveTextureUnits(), which only reserves its unit in that case.
 */
getBuiltInUniforms() {
  return `// Core - Canvas & Timing
uniform vec2 u_resolution; // Canvas width and height in pixels
uniform float u_time; // Elapsed time in seconds since scene start
uniform float u_deltaTime; // Time elapsed since last frame in seconds
uniform int u_frame; // Current frame number
uniform float u_pixelRatio; // Device pixel ratio for high-DPI displays
uniform float u_fps; // Current frames per second

// Mouse API
uniform vec2 u_mouse; // Mouse position in pixels (WebGL coords: bottom-left origin)
uniform bool u_mouseInCanvas; // True if mouse is inside canvas bounds
uniform bool u_mousePressed; // True if any mouse button is pressed
uniform bool u_mouseLeft; // True if left mouse button is pressed
uniform bool u_mouseRight; // True if right mouse button is pressed
uniform bool u_mouseMiddle; // True if middle mouse button is pressed
uniform vec2 u_mouseVelocity; // Mouse movement velocity in pixels per second

// Keyboard API - Common keys
uniform bool u_keySpace; // True if spacebar is pressed
uniform bool u_keyShift; // True if Shift key is pressed
uniform bool u_keyCtrl; // True if Ctrl/Cmd key is pressed
uniform bool u_keyAlt; // True if Alt/Option key is pressed
uniform bool u_keyW; // True if W key is pressed
uniform bool u_keyA; // True if A key is pressed
uniform bool u_keyS; // True if S key is pressed
uniform bool u_keyD; // True if D key is pressed
uniform bool u_keyUp; // True if Up arrow key is pressed
uniform bool u_keyDown; // True if Down arrow key is pressed
uniform bool u_keyLeft; // True if Left arrow key is pressed
uniform bool u_keyRight; // True if Right arrow key is pressed

// Touch API
uniform int u_touchCount; // Number of active touch points (0-5)
uniform vec2 u_touch0; // First touch point position in pixels
uniform vec2 u_touch1; // Second touch point position in pixels
uniform vec2 u_touch2; // Third touch point position in pixels
uniform vec2 u_touch3; // Fourth touch point position in pixels
uniform vec2 u_touch4; // Fifth touch point position in pixels

// Audio
uniform float u_audioVolume; // RMS volume level (0-1)
uniform float u_audioPeak; // Peak volume level (0-1)
uniform float u_audioBass; // Bass frequency band level (0-1)
uniform float u_audioMid; // Mid frequency band level (0-1)
uniform float u_audioTreble; // Treble frequency band level (0-1)
uniform float u_audioSubBass; // Sub-bass frequency band 20-60 Hz (0-1)
uniform float u_audioLowMid; // Low-mid frequency band 250-500 Hz (0-1)
uniform float u_audioHighMid; // High-mid frequency band 2-4 kHz (0-1)
uniform float u_audioPresence; // Presence frequency band 4-6 kHz (0-1)
uniform float u_audioBrilliance; // Brilliance frequency band 6-20 kHz (0-1)
uniform sampler2D u_audioFFT; // FFT texture containing 512 frequency bins

// Video
uniform sampler2D u_video; // Current video frame as texture
uniform vec2 u_videoResolution; // Video frame width and height in pixels
uniform float u_videoFrameRate; // Video frame rate in frames per second

// Multi-Stream Compositor Support (using individual uniforms due to WebGL 1.0 limitations)
uniform int u_streamCount; // Number of available compositor input streams (0-8)
uniform sampler2D u_stream0; // Stream 0 texture
uniform sampler2D u_stream1; // Stream 1 texture
uniform sampler2D u_stream2; // Stream 2 texture
uniform sampler2D u_stream3; // Stream 3 texture
uniform sampler2D u_stream4; // Stream 4 texture
uniform sampler2D u_stream5; // Stream 5 texture
uniform sampler2D u_stream6; // Stream 6 texture
uniform sampler2D u_stream7; // Stream 7 texture
uniform vec2 u_stream0Resolution; // Stream 0 resolution
uniform vec2 u_stream1Resolution; // Stream 1 resolution
uniform vec2 u_stream2Resolution; // Stream 2 resolution
uniform vec2 u_stream3Resolution; // Stream 3 resolution
uniform vec2 u_stream4Resolution; // Stream 4 resolution
uniform vec2 u_stream5Resolution; // Stream 5 resolution
uniform vec2 u_stream6Resolution; // Stream 6 resolution
uniform vec2 u_stream7Resolution; // Stream 7 resolution
uniform bool u_stream0Connected; // Stream 0 connection status
uniform bool u_stream1Connected; // Stream 1 connection status
uniform bool u_stream2Connected; // Stream 2 connection status
uniform bool u_stream3Connected; // Stream 3 connection status
uniform bool u_stream4Connected; // Stream 4 connection status
uniform bool u_stream5Connected; // Stream 5 connection status
uniform bool u_stream6Connected; // Stream 6 connection status
uniform bool u_stream7Connected; // Stream 7 connection status

// Device Video Support (device cameras)
uniform int u_deviceCount; // Number of device videos (0-8)
uniform sampler2D u_device0; // Device 0 camera texture
uniform sampler2D u_device1; // Device 1 camera texture
uniform sampler2D u_device2; // Device 2 camera texture
uniform sampler2D u_device3; // Device 3 camera texture
uniform sampler2D u_device4; // Device 4 camera texture
uniform sampler2D u_device5; // Device 5 camera texture
uniform sampler2D u_device6; // Device 6 camera texture
uniform sampler2D u_device7; // Device 7 camera texture
uniform vec2 u_device0Resolution; // Device 0 resolution
uniform vec2 u_device1Resolution; // Device 1 resolution
uniform vec2 u_device2Resolution; // Device 2 resolution
uniform vec2 u_device3Resolution; // Device 3 resolution
uniform vec2 u_device4Resolution; // Device 4 resolution
uniform vec2 u_device5Resolution; // Device 5 resolution
uniform vec2 u_device6Resolution; // Device 6 resolution
uniform vec2 u_device7Resolution; // Device 7 resolution
uniform bool u_device0Connected; // Device 0 connection status
uniform bool u_device1Connected; // Device 1 connection status
uniform bool u_device2Connected; // Device 2 connection status
uniform bool u_device3Connected; // Device 3 connection status
uniform bool u_device4Connected; // Device 4 connection status
uniform bool u_device5Connected; // Device 5 connection status
uniform bool u_device6Connected; // Device 6 connection status
uniform bool u_device7Connected; // Device 7 connection status

// CV - Face Detection
uniform int u_faceCount; // Number of detected faces (0-1)
uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
uniform vec3 u_face0HeadPose; // First face head rotation (pitch, yaw, roll) in radians
uniform float u_face0Confidence; // First face detection confidence (0-1)
uniform float u_face0Happy; // First face happy expression confidence (0-1)
uniform float u_face0Sad; // First face sad expression confidence (0-1)
uniform float u_face0Angry; // First face angry expression confidence (0-1)
uniform float u_face0Surprised; // First face surprised expression confidence (0-1)

// CV - Hand Tracking
uniform int u_handCount; // Number of detected hands (0-2)
uniform vec3 u_leftHandPalm; // Left hand palm position (x, y, z)
uniform vec3 u_rightHandPalm; // Right hand palm position (x, y, z)
uniform float u_leftHandFist; // Left hand fist gesture confidence (0-1)
uniform float u_leftHandOpen; // Left hand open palm gesture confidence (0-1)
uniform float u_rightHandFist; // Right hand fist gesture confidence (0-1)
uniform float u_rightHandOpen; // Right hand open palm gesture confidence (0-1)

// CV - Pose Detection
uniform bool u_poseDetected; // True if a pose is currently detected
uniform vec2 u_nosePosition; // Nose landmark position in pixels
uniform vec2 u_leftWristPosition; // Left wrist landmark position in pixels
uniform vec2 u_rightWristPosition; // Right wrist landmark position in pixels
uniform vec2 u_leftAnklePosition; // Left ankle landmark position in pixels
uniform vec2 u_rightAnklePosition; // Right ankle landmark position in pixels

// CV - Segmentation
uniform sampler2D u_segmentationMask; // Body segmentation mask texture (0=background, 1=person)
uniform vec2 u_segmentationRes; // Segmentation mask resolution in pixels

// Device Sensors - Internal Device (viji.device)
uniform vec3 u_deviceAcceleration; // Acceleration without gravity (m/s²) - x, y, z
uniform vec3 u_deviceAccelerationGravity; // Acceleration with gravity (m/s²) - x, y, z
uniform vec3 u_deviceRotationRate; // Rotation rate (deg/s) - alpha, beta, gamma
uniform vec3 u_deviceOrientation; // Device orientation (degrees) - alpha, beta, gamma
uniform bool u_deviceOrientationAbsolute; // True if orientation uses magnetometer (compass)

// Device Sensors - External Devices (viji.devices[0-7])
uniform int u_externalDeviceCount; // Number of connected external devices (0-8)
uniform vec3 u_device0Acceleration; // Device 0 acceleration without gravity (m/s²)
uniform vec3 u_device0AccelerationGravity; // Device 0 acceleration with gravity (m/s²)
uniform vec3 u_device0RotationRate; // Device 0 rotation rate (deg/s)
uniform vec3 u_device0Orientation; // Device 0 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device1Acceleration; // Device 1 acceleration without gravity (m/s²)
uniform vec3 u_device1AccelerationGravity; // Device 1 acceleration with gravity (m/s²)
uniform vec3 u_device1RotationRate; // Device 1 rotation rate (deg/s)
uniform vec3 u_device1Orientation; // Device 1 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device2Acceleration; // Device 2 acceleration without gravity (m/s²)
uniform vec3 u_device2AccelerationGravity; // Device 2 acceleration with gravity (m/s²)
uniform vec3 u_device2RotationRate; // Device 2 rotation rate (deg/s)
uniform vec3 u_device2Orientation; // Device 2 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device3Acceleration; // Device 3 acceleration without gravity (m/s²)
uniform vec3 u_device3AccelerationGravity; // Device 3 acceleration with gravity (m/s²)
uniform vec3 u_device3RotationRate; // Device 3 rotation rate (deg/s)
uniform vec3 u_device3Orientation; // Device 3 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device4Acceleration; // Device 4 acceleration without gravity (m/s²)
uniform vec3 u_device4AccelerationGravity; // Device 4 acceleration with gravity (m/s²)
uniform vec3 u_device4RotationRate; // Device 4 rotation rate (deg/s)
uniform vec3 u_device4Orientation; // Device 4 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device5Acceleration; // Device 5 acceleration without gravity (m/s²)
uniform vec3 u_device5AccelerationGravity; // Device 5 acceleration with gravity (m/s²)
uniform vec3 u_device5RotationRate; // Device 5 rotation rate (deg/s)
uniform vec3 u_device5Orientation; // Device 5 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device6Acceleration; // Device 6 acceleration without gravity (m/s²)
uniform vec3 u_device6AccelerationGravity; // Device 6 acceleration with gravity (m/s²)
uniform vec3 u_device6RotationRate; // Device 6 rotation rate (deg/s)
uniform vec3 u_device6Orientation; // Device 6 orientation (degrees) - alpha, beta, gamma
uniform vec3 u_device7Acceleration; // Device 7 acceleration without gravity (m/s²)
uniform vec3 u_device7AccelerationGravity; // Device 7 acceleration with gravity (m/s²)
uniform vec3 u_device7RotationRate; // Device 7 rotation rate (deg/s)
uniform vec3 u_device7Orientation; // Device 7 orientation (degrees) - alpha, beta, gamma

// Backbuffer (previous frame feedback)
${this.backbufferEnabled ? "uniform sampler2D backbuffer; // Previous frame texture for feedback effects" : "// backbuffer not enabled"}
`
}
2539
- /**
2540
- * Compile and link shader program
2541
- */
2542
- compileAndLinkShader(fragmentShaderCode) {
2543
- const gl = this.gl;
2544
- const vertexShaderCode = this.glslVersion === "glsl300" ? `#version 300 es
2545
- precision mediump float;
2546
- in vec2 a_position;
2547
- void main() {
2548
- gl_Position = vec4(a_position, 0.0, 1.0);
2549
- }` : `attribute vec2 a_position;
2550
- void main() {
2551
- gl_Position = vec4(a_position, 0.0, 1.0);
2552
- }`;
2553
- const vertexShader = this.compileShader(gl.VERTEX_SHADER, vertexShaderCode);
2554
- const fragmentShader = this.compileShader(gl.FRAGMENT_SHADER, fragmentShaderCode);
2555
- const program = gl.createProgram();
2556
- if (!program) {
2557
- throw new Error("Failed to create WebGL program");
2558
- }
2559
- gl.attachShader(program, vertexShader);
2560
- gl.attachShader(program, fragmentShader);
2561
- gl.linkProgram(program);
2562
- if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
2563
- const error = gl.getProgramInfoLog(program);
2564
- throw new Error(`Shader program link failed: ${error}`);
2565
- }
2566
- this.program = program;
2567
- gl.useProgram(program);
2568
- gl.deleteShader(vertexShader);
2569
- gl.deleteShader(fragmentShader);
2570
- }
2571
- /**
2572
- * Compile a shader
2573
- */
2574
- compileShader(type, source) {
2575
- const gl = this.gl;
2576
- const shader = gl.createShader(type);
2577
- if (!shader) {
2578
- throw new Error("Failed to create shader");
2579
- }
2580
- gl.shaderSource(shader, source);
2581
- gl.compileShader(shader);
2582
- if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
2583
- const error = gl.getShaderInfoLog(shader);
2584
- const shaderType = type === gl.VERTEX_SHADER ? "vertex" : "fragment";
2585
- throw new Error(`${shaderType} shader compilation failed:
2586
- ${error}`);
2587
- }
2588
- return shader;
2589
- }
2590
- /**
2591
- * Cache uniform locations for fast access
2592
- */
2593
- cacheUniformLocations() {
2594
- if (!this.program) return;
2595
- const gl = this.gl;
2596
- const numUniforms = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
2597
- const streamUniforms = [];
2598
- for (let i = 0; i < numUniforms; i++) {
2599
- const info = gl.getActiveUniform(this.program, i);
2600
- if (info) {
2601
- const location = gl.getUniformLocation(this.program, info.name);
2602
- this.uniformLocations.set(info.name, location);
2603
- if (info.name.includes("stream") || info.name.includes("u_stream")) {
2604
- streamUniforms.push(info.name);
2605
- }
2606
- }
2607
- }
2608
- if (streamUniforms.length > 0) {
2609
- console.log("[ShaderAdapter] Stream-related uniforms found:", streamUniforms);
2610
- } else {
2611
- console.log("[ShaderAdapter] NO stream-related uniforms found in shader!");
2612
- }
2613
- }
2614
- /**
2615
- * Reserve texture units for special textures
2616
- */
2617
- reserveTextureUnits() {
2618
- this.textureUnits.set("u_audioFFT", this.nextTextureUnit++);
2619
- this.textureUnits.set("u_video", this.nextTextureUnit++);
2620
- this.textureUnits.set("u_segmentationMask", this.nextTextureUnit++);
2621
- for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
2622
- this.textureUnits.set(`u_stream${i}`, this.nextTextureUnit++);
2623
- }
2624
- if (this.backbufferEnabled) {
2625
- this.textureUnits.set("backbuffer", this.nextTextureUnit++);
2626
- }
2627
- }
2628
- /**
2629
- * Create ping-pong framebuffers for backbuffer support
2630
- */
2631
- createBackbufferFramebuffers() {
2632
- const gl = this.gl;
2633
- const width = gl.canvas.width;
2634
- const height = gl.canvas.height;
2635
- const createFBOTexture = () => {
2636
- const texture = gl.createTexture();
2637
- gl.bindTexture(gl.TEXTURE_2D, texture);
2638
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
2639
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
2640
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
2641
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
2642
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
2643
- const framebuffer = gl.createFramebuffer();
2644
- gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
2645
- gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
2646
- return { framebuffer, texture };
2647
- };
2648
- const fbo1 = createFBOTexture();
2649
- const fbo2 = createFBOTexture();
2650
- this.backbufferFramebuffer = fbo1.framebuffer;
2651
- this.backbufferTexture = fbo1.texture;
2652
- this.currentFramebuffer = fbo2.framebuffer;
2653
- this.currentTexture = fbo2.texture;
2654
- gl.bindFramebuffer(gl.FRAMEBUFFER, null);
2655
- gl.bindTexture(gl.TEXTURE_2D, null);
2656
- }
2657
  /**
   * Main render method: uploads built-in and parameter uniforms, then draws a
   * full-screen quad. When backbuffer feedback is enabled the frame is first
   * rendered into an off-screen framebuffer, copied to the canvas with a
   * second draw, and the ping-pong framebuffer pair is swapped for next frame.
   * @param viji - per-frame host state (timing, input, media) consumed by updateBuiltInUniforms
   * @param parameterObjects - map of uniform name → parameter object (see updateParameterUniforms)
   */
  render(viji, parameterObjects) {
    const gl = this.gl;
    // Bail out if initialization has not completed (no program or quad geometry yet).
    if (!this.program || !this.quadBuffer) {
      console.warn("Shader not initialized");
      return;
    }
    gl.useProgram(this.program);
    this.updateBuiltInUniforms(viji);
    this.updateParameterUniforms(parameterObjects);
    if (this.backbufferEnabled && this.backbufferTexture) {
      // Bind last frame's result as the "backbuffer" sampler...
      const backbufferUnit = this.textureUnits.get("backbuffer");
      if (backbufferUnit !== void 0) {
        gl.activeTexture(gl.TEXTURE0 + backbufferUnit);
        gl.bindTexture(gl.TEXTURE_2D, this.backbufferTexture);
        // NOTE(review): setUniform's switch has no "sampler2D" case, so this
        // call appears to be a no-op — confirm the sampler is bound elsewhere.
        this.setUniform("backbuffer", "sampler2D", backbufferUnit);
      }
      // ...and render into the off-screen "current" framebuffer.
      gl.bindFramebuffer(gl.FRAMEBUFFER, this.currentFramebuffer);
    }
    const positionLocation = gl.getAttribLocation(this.program, "a_position");
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadBuffer);
    gl.enableVertexAttribArray(positionLocation);
    gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    if (this.backbufferEnabled) {
      // Second draw to the default framebuffer copies the off-screen result
      // to the canvas, sampling the just-rendered texture on unit 0.
      gl.bindFramebuffer(gl.FRAMEBUFFER, null);
      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, this.currentTexture);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
      // Swap the ping-pong pair: this frame's output becomes next frame's backbuffer.
      const tempFB = this.backbufferFramebuffer;
      const tempTex = this.backbufferTexture;
      this.backbufferFramebuffer = this.currentFramebuffer;
      this.backbufferTexture = this.currentTexture;
      this.currentFramebuffer = tempFB;
      this.currentTexture = tempTex;
    }
  }
2696
  /**
   * Push every built-in uniform derived from the per-frame `viji` host state:
   * canvas/timing, mouse/keyboard/touch input, audio analysis, main video and
   * CV results (faces, hands, pose, segmentation), compositor streams, and
   * device sensors. setUniform silently skips uniforms the shader does not
   * declare, so the full set is uploaded unconditionally every frame.
   */
  updateBuiltInUniforms(viji) {
    // --- Canvas & timing ---
    this.setUniform("u_resolution", "vec2", [viji.width, viji.height]);
    this.setUniform("u_time", "float", viji.time);
    this.setUniform("u_deltaTime", "float", viji.deltaTime);
    this.setUniform("u_frame", "int", viji.frameCount);
    this.setUniform("u_pixelRatio", "float", viji.pixelRatio);
    this.setUniform("u_fps", "float", viji.fps);
    // --- Mouse (y flipped to GL's bottom-left origin; velocity y negated to match) ---
    this.setUniform("u_mouse", "vec2", [viji.mouse.x, viji.height - viji.mouse.y]);
    this.setUniform("u_mouseInCanvas", "bool", viji.mouse.isInCanvas);
    this.setUniform("u_mousePressed", "bool", viji.mouse.isPressed);
    this.setUniform("u_mouseLeft", "bool", viji.mouse.leftButton);
    this.setUniform("u_mouseRight", "bool", viji.mouse.rightButton);
    this.setUniform("u_mouseMiddle", "bool", viji.mouse.middleButton);
    this.setUniform("u_mouseVelocity", "vec2", [viji.mouse.velocity.x, -viji.mouse.velocity.y]);
    // --- Keyboard: common keys exposed as individual booleans (both cases checked for letters) ---
    this.setUniform("u_keySpace", "bool", viji.keyboard.isPressed(" ") || viji.keyboard.isPressed("space"));
    this.setUniform("u_keyShift", "bool", viji.keyboard.shift);
    this.setUniform("u_keyCtrl", "bool", viji.keyboard.ctrl);
    this.setUniform("u_keyAlt", "bool", viji.keyboard.alt);
    this.setUniform("u_keyW", "bool", viji.keyboard.isPressed("w") || viji.keyboard.isPressed("W"));
    this.setUniform("u_keyA", "bool", viji.keyboard.isPressed("a") || viji.keyboard.isPressed("A"));
    this.setUniform("u_keyS", "bool", viji.keyboard.isPressed("s") || viji.keyboard.isPressed("S"));
    this.setUniform("u_keyD", "bool", viji.keyboard.isPressed("d") || viji.keyboard.isPressed("D"));
    this.setUniform("u_keyUp", "bool", viji.keyboard.isPressed("ArrowUp"));
    this.setUniform("u_keyDown", "bool", viji.keyboard.isPressed("ArrowDown"));
    this.setUniform("u_keyLeft", "bool", viji.keyboard.isPressed("ArrowLeft"));
    this.setUniform("u_keyRight", "bool", viji.keyboard.isPressed("ArrowRight"));
    // --- Touch: up to 5 points; unused slots report (0,0) ---
    this.setUniform("u_touchCount", "int", viji.touches.count);
    for (let i = 0; i < 5; i++) {
      const touch = viji.touches.points[i];
      if (touch) {
        this.setUniform(`u_touch${i}`, "vec2", [touch.x, viji.height - touch.y]);
      } else {
        this.setUniform(`u_touch${i}`, "vec2", [0, 0]);
      }
    }
    // --- Audio analysis (missing fields fall back to 0 via optional chaining) ---
    const audio = viji.audio;
    this.setUniform("u_audioVolume", "float", audio.volume?.current || 0);
    this.setUniform("u_audioPeak", "float", audio.volume?.peak || 0);
    this.setUniform("u_audioBass", "float", audio.bands?.low || 0);
    this.setUniform("u_audioMid", "float", audio.bands?.mid || 0);
    this.setUniform("u_audioTreble", "float", audio.bands?.high || 0);
    // NOTE(review): subBass/presence/brilliance reuse the low/highMid/high band
    // values rather than independent bands — confirm this aliasing is intended.
    this.setUniform("u_audioSubBass", "float", audio.bands?.low || 0);
    this.setUniform("u_audioLowMid", "float", audio.bands?.lowMid || 0);
    this.setUniform("u_audioHighMid", "float", audio.bands?.highMid || 0);
    this.setUniform("u_audioPresence", "float", audio.bands?.highMid || 0);
    this.setUniform("u_audioBrilliance", "float", audio.bands?.high || 0);
    if (audio.isConnected) {
      this.updateAudioFFTTexture(audio.getFrequencyData());
    }
    // --- Main video stream: texture + resolution/frame-rate uniforms ---
    const video = viji.video;
    if (video.isConnected && video.currentFrame) {
      this.updateVideoTexture(video.currentFrame);
      this.setUniform("u_videoResolution", "vec2", [video.frameWidth, video.frameHeight]);
      this.setUniform("u_videoFrameRate", "float", video.frameRate);
    } else {
      this.setUniform("u_videoResolution", "vec2", [0, 0]);
      this.setUniform("u_videoFrameRate", "float", 0);
    }
    // --- Compositor streams: all MAX_STREAMS slots written; disconnected slots zeroed ---
    const streams = viji.streams || [];
    const streamCount = Math.min(streams.length, ShaderWorkerAdapter.MAX_STREAMS);
    this.setUniform("u_streamCount", "int", streamCount);
    for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
      const connectedUniform = `u_stream${i}Connected`;
      const resolutionUniform = `u_stream${i}Resolution`;
      if (i < streamCount && streams[i]?.isConnected && streams[i]?.currentFrame) {
        this.updateStreamTexture(i, streams[i].currentFrame);
        this.setUniform(
          resolutionUniform,
          "vec2",
          [streams[i].frameWidth, streams[i].frameHeight]
        );
        this.setUniform(connectedUniform, "bool", true);
      } else {
        this.setUniform(resolutionUniform, "vec2", [0, 0]);
        this.setUniform(connectedUniform, "bool", false);
      }
    }
    // --- External device video streams (same slot scheme as compositor streams) ---
    const devices = viji.devices || [];
    const deviceCount = Math.min(devices.length, ShaderWorkerAdapter.MAX_DEVICE_VIDEOS);
    this.setUniform("u_deviceCount", "int", deviceCount);
    for (let i = 0; i < ShaderWorkerAdapter.MAX_DEVICE_VIDEOS; i++) {
      const connectedUniform = `u_device${i}Connected`;
      const resolutionUniform = `u_device${i}Resolution`;
      if (i < deviceCount && devices[i]?.video?.isConnected && devices[i].video.currentFrame) {
        this.updateDeviceTexture(i, devices[i].video.currentFrame);
        this.setUniform(
          resolutionUniform,
          "vec2",
          [devices[i].video.frameWidth, devices[i].video.frameHeight]
        );
        this.setUniform(connectedUniform, "bool", true);
      } else {
        this.setUniform(resolutionUniform, "vec2", [0, 0]);
        this.setUniform(connectedUniform, "bool", false);
      }
    }
    // --- Face tracking: only the first detected face is exposed as uniforms ---
    const faces = video.faces || [];
    this.setUniform("u_faceCount", "int", faces.length);
    if (faces.length > 0) {
      const face = faces[0];
      this.setUniform("u_face0Bounds", "vec4", [face.bounds.x, face.bounds.y, face.bounds.width, face.bounds.height]);
      this.setUniform("u_face0HeadPose", "vec3", [face.headPose.pitch, face.headPose.yaw, face.headPose.roll]);
      this.setUniform("u_face0Confidence", "float", face.confidence);
      this.setUniform("u_face0Happy", "float", face.expressions.happy);
      this.setUniform("u_face0Sad", "float", face.expressions.sad);
      this.setUniform("u_face0Angry", "float", face.expressions.angry);
      this.setUniform("u_face0Surprised", "float", face.expressions.surprised);
    } else {
      this.setUniform("u_face0Bounds", "vec4", [0, 0, 0, 0]);
      this.setUniform("u_face0HeadPose", "vec3", [0, 0, 0]);
      this.setUniform("u_face0Confidence", "float", 0);
      this.setUniform("u_face0Happy", "float", 0);
      this.setUniform("u_face0Sad", "float", 0);
      this.setUniform("u_face0Angry", "float", 0);
      this.setUniform("u_face0Surprised", "float", 0);
    }
    // --- Hand tracking: first left and first right hand, palm + gesture scalars ---
    const hands = video.hands || [];
    this.setUniform("u_handCount", "int", hands.length);
    const leftHand = hands.find((h) => h.handedness === "left");
    const rightHand = hands.find((h) => h.handedness === "right");
    if (leftHand) {
      this.setUniform("u_leftHandPalm", "vec3", [leftHand.palm.x, leftHand.palm.y, leftHand.palm.z]);
      this.setUniform("u_leftHandFist", "float", leftHand.gestures?.fist || 0);
      this.setUniform("u_leftHandOpen", "float", leftHand.gestures?.openPalm || 0);
    } else {
      this.setUniform("u_leftHandPalm", "vec3", [0, 0, 0]);
      this.setUniform("u_leftHandFist", "float", 0);
      this.setUniform("u_leftHandOpen", "float", 0);
    }
    if (rightHand) {
      this.setUniform("u_rightHandPalm", "vec3", [rightHand.palm.x, rightHand.palm.y, rightHand.palm.z]);
      this.setUniform("u_rightHandFist", "float", rightHand.gestures?.fist || 0);
      this.setUniform("u_rightHandOpen", "float", rightHand.gestures?.openPalm || 0);
    } else {
      this.setUniform("u_rightHandPalm", "vec3", [0, 0, 0]);
      this.setUniform("u_rightHandFist", "float", 0);
      this.setUniform("u_rightHandOpen", "float", 0);
    }
    // --- Pose tracking: selected landmark indices (0/15/16/27/28); indices
    // presumably follow the pose model's landmark numbering — TODO confirm ---
    const pose = video.pose;
    this.setUniform("u_poseDetected", "bool", pose !== null);
    if (pose) {
      const nose = pose.landmarks[0];
      const leftWrist = pose.landmarks[15];
      const rightWrist = pose.landmarks[16];
      const leftAnkle = pose.landmarks[27];
      const rightAnkle = pose.landmarks[28];
      this.setUniform("u_nosePosition", "vec2", [nose?.x || 0, nose?.y || 0]);
      this.setUniform("u_leftWristPosition", "vec2", [leftWrist?.x || 0, leftWrist?.y || 0]);
      this.setUniform("u_rightWristPosition", "vec2", [rightWrist?.x || 0, rightWrist?.y || 0]);
      this.setUniform("u_leftAnklePosition", "vec2", [leftAnkle?.x || 0, leftAnkle?.y || 0]);
      this.setUniform("u_rightAnklePosition", "vec2", [rightAnkle?.x || 0, rightAnkle?.y || 0]);
    } else {
      this.setUniform("u_nosePosition", "vec2", [0, 0]);
      this.setUniform("u_leftWristPosition", "vec2", [0, 0]);
      this.setUniform("u_rightWristPosition", "vec2", [0, 0]);
      this.setUniform("u_leftAnklePosition", "vec2", [0, 0]);
      this.setUniform("u_rightAnklePosition", "vec2", [0, 0]);
    }
    // --- Segmentation mask texture + resolution ---
    const segmentation = video.segmentation;
    if (segmentation) {
      this.updateSegmentationTexture(segmentation.mask, segmentation.width, segmentation.height);
      this.setUniform("u_segmentationRes", "vec2", [segmentation.width, segmentation.height]);
    } else {
      this.setUniform("u_segmentationRes", "vec2", [0, 0]);
    }
    // --- Internal device sensors: motion + orientation, zeroed when absent ---
    const internalDevice = viji.device;
    if (internalDevice) {
      const motion = internalDevice.motion;
      if (motion?.acceleration) {
        this.setUniform("u_deviceAcceleration", "vec3", [
          motion.acceleration.x ?? 0,
          motion.acceleration.y ?? 0,
          motion.acceleration.z ?? 0
        ]);
      } else {
        this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
      }
      if (motion?.accelerationIncludingGravity) {
        this.setUniform("u_deviceAccelerationGravity", "vec3", [
          motion.accelerationIncludingGravity.x ?? 0,
          motion.accelerationIncludingGravity.y ?? 0,
          motion.accelerationIncludingGravity.z ?? 0
        ]);
      } else {
        this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
      }
      if (motion?.rotationRate) {
        this.setUniform("u_deviceRotationRate", "vec3", [
          motion.rotationRate.alpha ?? 0,
          motion.rotationRate.beta ?? 0,
          motion.rotationRate.gamma ?? 0
        ]);
      } else {
        this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
      }
      const orientation = internalDevice.orientation;
      if (orientation) {
        this.setUniform("u_deviceOrientation", "vec3", [
          orientation.alpha ?? 0,
          orientation.beta ?? 0,
          orientation.gamma ?? 0
        ]);
        this.setUniform("u_deviceOrientationAbsolute", "bool", orientation.absolute);
      } else {
        this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
        this.setUniform("u_deviceOrientationAbsolute", "bool", false);
      }
    } else {
      this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
      this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
      this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
      this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
      this.setUniform("u_deviceOrientationAbsolute", "bool", false);
    }
    // --- External device sensors: one uniform set per slot up to MAX_EXTERNAL_DEVICES ---
    const externalDevices = viji.devices || [];
    const externalDeviceCount = Math.min(externalDevices.length, ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES);
    this.setUniform("u_externalDeviceCount", "int", externalDeviceCount);
    for (let i = 0; i < ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES; i++) {
      if (i < externalDeviceCount) {
        const device = externalDevices[i];
        const motion = device.motion;
        if (motion?.acceleration) {
          this.setUniform(`u_device${i}Acceleration`, "vec3", [
            motion.acceleration.x ?? 0,
            motion.acceleration.y ?? 0,
            motion.acceleration.z ?? 0
          ]);
        } else {
          this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
        }
        if (motion?.accelerationIncludingGravity) {
          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [
            motion.accelerationIncludingGravity.x ?? 0,
            motion.accelerationIncludingGravity.y ?? 0,
            motion.accelerationIncludingGravity.z ?? 0
          ]);
        } else {
          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
        }
        if (motion?.rotationRate) {
          this.setUniform(`u_device${i}RotationRate`, "vec3", [
            motion.rotationRate.alpha ?? 0,
            motion.rotationRate.beta ?? 0,
            motion.rotationRate.gamma ?? 0
          ]);
        } else {
          this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
        }
        const orientation = device.orientation;
        if (orientation) {
          this.setUniform(`u_device${i}Orientation`, "vec3", [
            orientation.alpha ?? 0,
            orientation.beta ?? 0,
            orientation.gamma ?? 0
          ]);
        } else {
          this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
        }
      } else {
        this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
        this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
        this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
        this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
      }
    }
  }
2965
- /**
2966
- * Update parameter uniforms from parameter objects
2967
- */
2968
- updateParameterUniforms(parameterObjects) {
2969
- for (const param of this.parameters) {
2970
- const paramObj = parameterObjects.get(param.uniformName);
2971
- if (!paramObj) {
2972
- if (Math.random() < 0.01) {
2973
- console.log(`[ShaderAdapter] Parameter '${param.uniformName}' not found in parameterObjects`);
2974
- }
2975
- continue;
2976
- }
2977
- const value = paramObj.value;
2978
- switch (param.type) {
2979
- case "slider":
2980
- case "number":
2981
- this.setUniform(param.uniformName, "float", value);
2982
- break;
2983
- case "color":
2984
- const rgb = this.hexToRgb(value);
2985
- this.setUniform(param.uniformName, "vec3", rgb);
2986
- break;
2987
- case "toggle":
2988
- this.setUniform(param.uniformName, "bool", value);
2989
- break;
2990
- case "select":
2991
- const index = param.config.options?.indexOf(value) || 0;
2992
- this.setUniform(param.uniformName, "int", index);
2993
- break;
2994
- case "image":
2995
- if (value) {
2996
- this.updateImageTexture(param.uniformName, value);
2997
- }
2998
- break;
2999
- }
3000
- }
3001
- }
3002
- /**
3003
- * Set uniform value
3004
- */
3005
- setUniform(name, type, value) {
3006
- const location = this.uniformLocations.get(name);
3007
- if (location === null || location === void 0) {
3008
- if (name.includes("[") && Math.random() < 0.01) {
3009
- console.log(`[ShaderAdapter] Uniform '${name}' not found (location is ${location})`);
3010
- }
3011
- return;
3012
- }
3013
- const gl = this.gl;
3014
- switch (type) {
3015
- case "float":
3016
- gl.uniform1f(location, value);
3017
- break;
3018
- case "int":
3019
- gl.uniform1i(location, value);
3020
- break;
3021
- case "bool":
3022
- gl.uniform1i(location, value ? 1 : 0);
3023
- break;
3024
- case "vec2":
3025
- gl.uniform2f(location, value[0], value[1]);
3026
- break;
3027
- case "vec3":
3028
- gl.uniform3f(location, value[0], value[1], value[2]);
3029
- break;
3030
- case "vec4":
3031
- gl.uniform4f(location, value[0], value[1], value[2], value[3]);
3032
- break;
3033
- }
3034
- }
3035
- /**
3036
- * Convert hex color to RGB [0-1]
3037
- */
3038
- hexToRgb(hex) {
3039
- const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
3040
- if (result) {
3041
- return [
3042
- parseInt(result[1], 16) / 255,
3043
- parseInt(result[2], 16) / 255,
3044
- parseInt(result[3], 16) / 255
3045
- ];
3046
- }
3047
- return [0, 0, 0];
3048
- }
3049
- /**
3050
- * Update audio FFT texture
3051
- */
3052
- updateAudioFFTTexture(frequencyData) {
3053
- const gl = this.gl;
3054
- const unit = this.textureUnits.get("u_audioFFT");
3055
- if (!this.audioFFTTexture) {
3056
- this.audioFFTTexture = gl.createTexture();
3057
- }
3058
- gl.activeTexture(gl.TEXTURE0 + unit);
3059
- gl.bindTexture(gl.TEXTURE_2D, this.audioFFTTexture);
3060
- gl.texImage2D(
3061
- gl.TEXTURE_2D,
3062
- 0,
3063
- gl.LUMINANCE,
3064
- frequencyData.length,
3065
- 1,
3066
- 0,
3067
- gl.LUMINANCE,
3068
- gl.UNSIGNED_BYTE,
3069
- frequencyData
3070
- );
3071
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
3072
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
3073
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
3074
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
3075
- const location = this.uniformLocations.get("u_audioFFT");
3076
- if (location) {
3077
- gl.uniform1i(location, unit);
3078
- }
3079
- }
3080
- /**
3081
- * Update video texture
3082
- * Supports both OffscreenCanvas and ImageBitmap
3083
- */
3084
- updateVideoTexture(videoFrame) {
3085
- const gl = this.gl;
3086
- const unit = this.textureUnits.get("u_video");
3087
- if (!this.videoTexture) {
3088
- this.videoTexture = gl.createTexture();
3089
- }
3090
- gl.activeTexture(gl.TEXTURE0 + unit);
3091
- gl.bindTexture(gl.TEXTURE_2D, this.videoTexture);
3092
- const shouldFlip = videoFrame instanceof OffscreenCanvas;
3093
- if (shouldFlip) {
3094
- gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
3095
- }
3096
- gl.texImage2D(
3097
- gl.TEXTURE_2D,
3098
- 0,
3099
- gl.RGBA,
3100
- gl.RGBA,
3101
- gl.UNSIGNED_BYTE,
3102
- videoFrame
3103
- );
3104
- if (shouldFlip) {
3105
- gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
3106
- }
3107
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
3108
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
3109
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
3110
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
3111
- const location = this.uniformLocations.get("u_video");
3112
- if (location) {
3113
- gl.uniform1i(location, unit);
3114
- }
3115
- if (videoFrame instanceof ImageBitmap) {
3116
- videoFrame.close();
3117
- }
3118
- }
3119
- /**
3120
- * Update compositor stream texture at specified index
3121
- * Supports both OffscreenCanvas and ImageBitmap for zero-copy pipeline
3122
- */
3123
- updateStreamTexture(index, streamFrame) {
3124
- const gl = this.gl;
3125
- const uniformName = `u_stream${index}`;
3126
- const unit = this.textureUnits.get(uniformName);
3127
- if (!this.streamTextures[index]) {
3128
- this.streamTextures[index] = gl.createTexture();
3129
- }
3130
- gl.activeTexture(gl.TEXTURE0 + unit);
3131
- gl.bindTexture(gl.TEXTURE_2D, this.streamTextures[index]);
3132
- const shouldFlip = streamFrame instanceof OffscreenCanvas;
3133
- if (shouldFlip) {
3134
- gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
3135
- }
3136
- gl.texImage2D(
3137
- gl.TEXTURE_2D,
3138
- 0,
3139
- gl.RGBA,
3140
- gl.RGBA,
3141
- gl.UNSIGNED_BYTE,
3142
- streamFrame
3143
- );
3144
- if (shouldFlip) {
3145
- gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
3146
- }
3147
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
3148
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
3149
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
3150
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
3151
- const location = this.uniformLocations.get(uniformName);
3152
- if (location) {
3153
- gl.uniform1i(location, unit);
3154
- }
3155
- if (streamFrame instanceof ImageBitmap) {
3156
- streamFrame.close();
3157
- }
3158
- }
3159
  /**
   * Upload an external device's video frame into its dedicated texture slot.
   * Accepts OffscreenCanvas (flipped vertically on upload) or ImageBitmap
   * (closed after upload to release its backing memory).
   * @param {number} index - device slot, must be < MAX_DEVICE_VIDEOS
   * @param frame - OffscreenCanvas or ImageBitmap with the latest frame
   */
  updateDeviceTexture(index, frame) {
    if (!this.gl || index >= ShaderWorkerAdapter.MAX_DEVICE_VIDEOS) return;
    if (!this.deviceTextures[index]) {
      this.deviceTextures[index] = this.gl.createTexture();
    }
    const texture = this.deviceTextures[index];
    if (!texture) return;
    // NOTE(review): unlike the other upload paths this uses a hard-coded unit
    // (8 + index) instead of this.textureUnits, and never binds a sampler
    // uniform via uniform1i — verify it cannot collide with units reserved in
    // reserveTextureUnits() and that the sampler is wired up elsewhere.
    const textureUnit = 8 + index;
    this.gl.activeTexture(this.gl.TEXTURE0 + textureUnit);
    this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
    // OffscreenCanvas sources need a vertical flip for GL's bottom-left origin.
    const shouldFlip = frame instanceof OffscreenCanvas;
    if (shouldFlip) {
      this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, true);
    }
    this.gl.texImage2D(
      this.gl.TEXTURE_2D,
      0,
      this.gl.RGBA,
      this.gl.RGBA,
      this.gl.UNSIGNED_BYTE,
      frame
    );
    if (shouldFlip) {
      this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, false);
    }
    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
    // ImageBitmaps are one-shot frames; close to free their memory.
    if (frame instanceof ImageBitmap) {
      frame.close();
    }
  }
3195
- /**
3196
- * Update segmentation mask texture
3197
- */
3198
- updateSegmentationTexture(mask, width, height) {
3199
- const gl = this.gl;
3200
- const unit = this.textureUnits.get("u_segmentationMask");
3201
- if (!this.segmentationTexture) {
3202
- this.segmentationTexture = gl.createTexture();
3203
- }
3204
- gl.activeTexture(gl.TEXTURE0 + unit);
3205
- gl.bindTexture(gl.TEXTURE_2D, this.segmentationTexture);
3206
- gl.texImage2D(
3207
- gl.TEXTURE_2D,
3208
- 0,
3209
- gl.LUMINANCE,
3210
- width,
3211
- height,
3212
- 0,
3213
- gl.LUMINANCE,
3214
- gl.UNSIGNED_BYTE,
3215
- mask
3216
- );
3217
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
3218
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
3219
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
3220
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
3221
- const location = this.uniformLocations.get("u_segmentationMask");
3222
- if (location) {
3223
- gl.uniform1i(location, unit);
3224
- }
3225
- }
3226
- /**
3227
- * Update image parameter texture
3228
- */
3229
- updateImageTexture(name, imageBitmap) {
3230
- const gl = this.gl;
3231
- if (!this.textureUnits.has(name)) {
3232
- this.textureUnits.set(name, this.nextTextureUnit++);
3233
- }
3234
- const unit = this.textureUnits.get(name);
3235
- if (!this.textures.has(name)) {
3236
- const texture2 = gl.createTexture();
3237
- if (texture2) {
3238
- this.textures.set(name, texture2);
3239
- }
3240
- }
3241
- const texture = this.textures.get(name);
3242
- if (!texture) return;
3243
- gl.activeTexture(gl.TEXTURE0 + unit);
3244
- gl.bindTexture(gl.TEXTURE_2D, texture);
3245
- gl.texImage2D(
3246
- gl.TEXTURE_2D,
3247
- 0,
3248
- gl.RGBA,
3249
- gl.RGBA,
3250
- gl.UNSIGNED_BYTE,
3251
- imageBitmap
3252
- );
3253
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
3254
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
3255
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
3256
- gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
3257
- const location = this.uniformLocations.get(name);
3258
- if (location) {
3259
- gl.uniform1i(location, unit);
3260
- }
3261
- }
3262
- /**
3263
- * Handle canvas resize
3264
- */
3265
- resize(width, height) {
3266
- const gl = this.gl;
3267
- gl.viewport(0, 0, width, height);
3268
- if (this.backbufferEnabled) {
3269
- this.createBackbufferFramebuffers();
3270
- }
3271
- }
3272
  /**
   * Expose the parsed parameter definitions so the host can build its UI.
   * Returns the internal array by reference; callers should treat it as
   * read-only.
   */
  getParameterDefinitions() {
    return this.parameters;
  }
3278
- /**
3279
- * Cleanup resources
3280
- */
3281
- destroy() {
3282
- const gl = this.gl;
3283
- if (this.program) {
3284
- gl.deleteProgram(this.program);
3285
- this.program = null;
3286
- }
3287
- if (this.audioFFTTexture) gl.deleteTexture(this.audioFFTTexture);
3288
- if (this.videoTexture) gl.deleteTexture(this.videoTexture);
3289
- if (this.segmentationTexture) gl.deleteTexture(this.segmentationTexture);
3290
- for (const texture of this.streamTextures) {
3291
- if (texture) gl.deleteTexture(texture);
3292
- }
3293
- this.streamTextures = [];
3294
- for (const texture of this.deviceTextures) {
3295
- if (texture) gl.deleteTexture(texture);
3296
- }
3297
- this.deviceTextures = [];
3298
- for (const texture of this.textures.values()) {
3299
- if (texture) gl.deleteTexture(texture);
3300
- }
3301
- this.textures.clear();
3302
- if (this.backbufferFramebuffer) gl.deleteFramebuffer(this.backbufferFramebuffer);
3303
- if (this.backbufferTexture) gl.deleteTexture(this.backbufferTexture);
3304
- if (this.currentFramebuffer) gl.deleteFramebuffer(this.currentFramebuffer);
3305
- if (this.currentTexture) gl.deleteTexture(this.currentTexture);
3306
- if (this.quadBuffer) gl.deleteBuffer(this.quadBuffer);
3307
- }
3308
- }
3309
- class VijiWorkerRuntime {
3310
- canvas = null;
3311
- ctx = null;
3312
- gl = null;
3313
- isRunning = false;
3314
- frameCount = 0;
3315
- lastTime = 0;
3316
- startTime = 0;
3317
- frameRateMode = "full";
3318
- skipNextFrame = false;
3319
- screenRefreshRate = 60;
3320
- // Will be detected
3321
- // Debug logging control
3322
- debugMode = false;
3323
- // P5.js adapter for P5 mode
3324
- p5Adapter = null;
3325
- // Shader adapter for shader mode
3326
- shaderAdapter = null;
3327
- rendererType = "native";
3328
- // Pending capture requests (queue to handle multiple simultaneous requests)
3329
- pendingCaptures = [];
3330
- /**
3331
- * Enable or disable debug logging
3332
- */
3333
- setDebugMode(enabled) {
3334
- this.debugMode = enabled;
3335
- this.videoSystems.forEach((vs) => vs?.setDebugMode(enabled));
3336
- if (this.parameterSystem && "setDebugMode" in this.parameterSystem) {
3337
- this.parameterSystem.setDebugMode(enabled);
3338
- }
3339
- if (this.interactionSystem && "setDebugMode" in this.interactionSystem) {
3340
- this.interactionSystem.setDebugMode(enabled);
3341
- }
3342
- }
3343
- /**
3344
- * Debug logging helper
3345
- */
3346
- debugLog(message, ...args) {
3347
- if (this.debugMode) {
3348
- console.log(message, ...args);
3349
- }
3350
- }
3351
  // Effective refresh rate tracking
  effectiveFrameTimes = [];
  // Timestamp (ms) of the last performance-update message sent to the host
  lastEffectiveRateReport = 0;
  effectiveRateReportInterval = 1e3;
  // Report every 1 second
  // Parameter system
  parameterSystem;
  // Interaction system (Phase 7)
  interactionSystem;
  // Video systems (multi-stream) - sparse array with reserved index contract:
  // Index 0: Reserved for main video (with CV) - may be undefined if no main video
  // Index 1..N: Additional streams (no CV)
  // Index N+1..: Device streams
  // IMPORTANT: Index 0 is ALWAYS reserved even when main video is absent
  videoSystems = [];
  // Auto-capture for frame sources
  autoCaptureEnabled = false;
  autoCaptureFormat = { flipY: true };
  // Default: flip for WebGL compatibility
  // Audio state (Phase 5) - receives analysis results from host
  audioState = {
    isConnected: false,
    volume: { current: 0, peak: 0, smoothed: 0 },
    bands: {
      low: 0,
      lowMid: 0,
      mid: 0,
      highMid: 0,
      high: 0,
      lowSmoothed: 0,
      lowMidSmoothed: 0,
      midSmoothed: 0,
      highMidSmoothed: 0,
      highSmoothed: 0
    },
    beat: {
      kick: 0,
      snare: 0,
      hat: 0,
      any: 0,
      kickSmoothed: 0,
      snareSmoothed: 0,
      anySmoothed: 0,
      // Boolean per-frame triggers derived from beat events (see handleAudioAnalysisUpdate)
      triggers: { any: false, kick: false, snare: false, hat: false },
      bpm: 120,
      phase: 0,
      bar: 0,
      confidence: 0,
      isLocked: false
    },
    spectral: {
      brightness: 0,
      flatness: 0,
      flux: 0
    },
    frequencyData: new Uint8Array(0)
  };
  // Device sensor state (internal device + external devices)
  deviceState = {
    device: {
      motion: null,
      orientation: null
    },
    devices: []
  };
  // Map deviceId → streamIndex for O(1) device video lookup
  deviceVideoMap = /* @__PURE__ */ new Map();
  // Video state is now managed by the worker-side VideoSystem
  // Artist API object
  viji = {
    // Canvas (will be set during init)
    canvas: null,
    ctx: null,
    gl: null,
    width: 0,
    height: 0,
    pixelRatio: 1,
    // Timing
    time: 0,
    deltaTime: 0,
    frameCount: 0,
    fps: 60,
    // Audio API (Phase 5) - will be set in constructor
    audio: {},
    // Main video stream (index 0, CV enabled)
    video: {
      isConnected: false,
      currentFrame: null,
      frameWidth: 0,
      frameHeight: 0,
      frameRate: 0,
      getFrameData: () => null,
      faces: [],
      hands: [],
      pose: null,
      segmentation: null,
      cv: {}
    },
    // Additional video streams (index 1+, no CV)
    streams: [],
    // Interaction APIs will be added during construction
    mouse: {},
    keyboard: {},
    touches: {},
    // Device sensor APIs (internal device + external devices)
    device: {
      motion: null,
      orientation: null
    },
    devices: [],
    // Parameter helper functions (return parameter objects) - delegate to parameter system
    slider: (defaultValue, config) => {
      return this.parameterSystem.createSliderParameter(defaultValue, config);
    },
    color: (defaultValue, config) => {
      return this.parameterSystem.createColorParameter(defaultValue, config);
    },
    toggle: (defaultValue, config) => {
      return this.parameterSystem.createToggleParameter(defaultValue, config);
    },
    select: (defaultValue, config) => {
      return this.parameterSystem.createSelectParameter(defaultValue, config);
    },
    text: (defaultValue, config) => {
      return this.parameterSystem.createTextParameter(defaultValue, config);
    },
    number: (defaultValue, config) => {
      return this.parameterSystem.createNumberParameter(defaultValue, config);
    },
    image: (defaultValue, config) => {
      return this.parameterSystem.createImageParameter(defaultValue, config);
    },
    // Context selection: lazily creates and caches a 2D or WebGL context
    // on the worker canvas; returns null for unknown types.
    useContext: (type) => {
      if (type === "2d") {
        if (!this.ctx && this.canvas) {
          this.ctx = this.canvas.getContext("2d");
          this.viji.ctx = this.ctx;
        }
        return this.ctx;
      } else if (type === "webgl") {
        if (!this.gl && this.canvas) {
          // Prefer WebGL2, fall back to WebGL1
          this.gl = this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
          this.viji.gl = this.gl;
          if (this.gl) {
            this.gl.viewport(0, 0, this.viji.width, this.viji.height);
          }
        }
        return this.gl;
      }
      return null;
    }
  };
3504
  /**
   * Builds the runtime: wires the parameter system's outgoing messages to
   * postMessage, installs interaction APIs onto the viji object, seeds the
   * audio API from the default audio state, and registers the worker
   * message handler.
   */
  constructor() {
    this.parameterSystem = new ParameterSystem((type, data) => {
      this.postMessage(type, data);
    });
    this.interactionSystem = new InteractionSystem();
    // Adds mouse/keyboard/touch APIs onto viji
    Object.assign(this.viji, this.interactionSystem.getInteractionAPIs());
    this.viji.audio = {
      ...this.audioState,
      // Closure reads live audioState so artists always see the latest buffer
      getFrequencyData: () => this.audioState.frequencyData
    };
    this.setupMessageHandling();
  }
3516
  /**
   * Initialize P5.js mode
   * Sets up P5 rendering with P5WorkerAdapter
   *
   * On failure: reports an error to the host and falls back to the
   * native renderer (rendererType = "native", adapter cleared).
   */
  async initP5Mode(setup, render) {
    try {
      this.rendererType = "p5";
      this.debugLog("🎨 Initializing P5.js mode...");
      this.p5Adapter = new P5WorkerAdapter(
        this.canvas,
        this.viji,
        {
          setup,
          render
        }
      );
      await this.p5Adapter.init();
      this.debugLog("✅ P5.js mode initialized successfully");
    } catch (error) {
      console.error("❌ Failed to initialize P5.js mode:", error);
      this.postMessage("error", {
        message: `P5.js initialization failed: ${error.message}`,
        code: "P5_INIT_ERROR"
      });
      // Fall back to native rendering so the worker keeps running
      this.rendererType = "native";
      this.p5Adapter = null;
    }
  }
3544
  /**
   * Initialize shader rendering mode
   * Used when artist code includes // @renderer shader
   *
   * Registers each parameter declared by the shader adapter with the
   * parameter system. On failure: reports an error to the host and falls
   * back to the native renderer.
   */
  async initShaderMode(shaderCode) {
    try {
      this.rendererType = "shader";
      this.debugLog("🎨 Initializing Shader mode...");
      this.shaderAdapter = new ShaderWorkerAdapter(
        this.canvas,
        this.viji,
        shaderCode
      );
      await this.shaderAdapter.init();
      // Shader source declares its own uniform parameters; expose them to the host UI
      const parameterDefinitions = this.shaderAdapter.getParameterDefinitions();
      for (const param of parameterDefinitions) {
        this.registerShaderParameter(param);
      }
      this.debugLog("✅ Shader mode initialized successfully");
    } catch (error) {
      console.error("❌ Failed to initialize Shader mode:", error);
      this.postMessage("error", {
        message: `Shader initialization failed: ${error.message}`,
        code: "SHADER_INIT_ERROR"
      });
      // Fall back to native rendering so the worker keeps running
      this.rendererType = "native";
      this.shaderAdapter = null;
    }
  }
3573
  /**
   * Register a shader parameter with the parameter system
   * In the parameter system, the 'label' serves as the parameter name/key
   *
   * The shader's uniform name becomes the parameter key, and the shader's
   * human-readable label (if any) is folded into the description.
   * Unknown param.type values are silently ignored.
   */
  registerShaderParameter(param) {
    const config = param.config;
    const paramConfig = {
      ...config,
      label: param.uniformName,
      // uniformName becomes the parameter key
      description: config.label ? `${config.label}${config.description ? ": " + config.description : ""}` : config.description
    };
    switch (param.type) {
      case "slider":
        this.viji.slider(config.default, paramConfig);
        break;
      case "number":
        this.viji.number(config.default, paramConfig);
        break;
      case "color":
        this.viji.color(config.default, paramConfig);
        break;
      case "toggle":
        this.viji.toggle(config.default, paramConfig);
        break;
      case "select":
        this.viji.select(config.default, {
          ...paramConfig,
          options: config.options
        });
        break;
      case "image":
        // Images have no meaningful default value in shader declarations
        this.viji.image(null, paramConfig);
        break;
    }
  }
3609
  // Reset parameter state (called when loading new scene)
  // Resets parameters, interaction state, audio state (back to the same
  // defaults as the field initializer), and all video-system state, then
  // re-syncs the viji.video and viji.streams facades.
  resetParameterState() {
    this.parameterSystem.resetParameterState();
    this.interactionSystem.resetInteractionState();
    this.audioState = {
      isConnected: false,
      volume: { current: 0, peak: 0, smoothed: 0 },
      bands: {
        low: 0,
        lowMid: 0,
        mid: 0,
        highMid: 0,
        high: 0,
        lowSmoothed: 0,
        lowMidSmoothed: 0,
        midSmoothed: 0,
        highMidSmoothed: 0,
        highSmoothed: 0
      },
      beat: {
        kick: 0,
        snare: 0,
        hat: 0,
        any: 0,
        kickSmoothed: 0,
        snareSmoothed: 0,
        anySmoothed: 0,
        triggers: { any: false, kick: false, snare: false, hat: false },
        bpm: 120,
        phase: 0,
        bar: 0,
        confidence: 0,
        isLocked: false
      },
      spectral: {
        brightness: 0,
        flatness: 0,
        flux: 0
      },
      frequencyData: new Uint8Array(0)
    };
    // Rebuild the audio facade so artist code sees the reset state
    this.viji.audio = {
      ...this.audioState,
      getFrequencyData: () => this.audioState.frequencyData
    };
    // videoSystems is sparse; skip empty slots
    this.videoSystems.forEach((vs) => vs?.resetVideoState());
    if (this.videoSystems[0]) {
      Object.assign(this.viji.video, this.videoSystems[0].getVideoAPI());
    }
    this.updateVijiStreams();
  }
3660
- /**
3661
- * Updates viji.streams from videoSystems array
3662
- * Filters only 'additional' type streams (excludes main and device streams)
3663
- */
3664
- updateVijiStreams() {
3665
- const freshStreams = this.videoSystems.filter((vs) => vs && vs.getStreamType() === "additional").map((vs) => vs.getVideoAPI());
3666
- this.viji.streams.length = freshStreams.length;
3667
- for (let i = 0; i < freshStreams.length; i++) {
3668
- this.viji.streams[i] = freshStreams[i];
3669
- }
3670
- }
3671
  // Send all parameters (from helper functions) to host
  // Thin delegate: the parameter system owns the message format.
  sendAllParametersToHost() {
    this.parameterSystem.sendAllParametersToHost();
  }
3675
- setupMessageHandling() {
3676
- self.onmessage = (event) => {
3677
- const message = event.data;
3678
- switch (message.type) {
3679
- case "init":
3680
- this.handleInit(message);
3681
- break;
3682
- case "frame-rate-update":
3683
- this.handleFrameRateUpdate(message);
3684
- break;
3685
- case "refresh-rate-update":
3686
- this.handleRefreshRateUpdate(message);
3687
- break;
3688
- case "cv-frame-rate-update":
3689
- this.handleCVFrameRateUpdate(message);
3690
- break;
3691
- case "resolution-update":
3692
- this.handleResolutionUpdate(message);
3693
- break;
3694
- case "set-scene-code":
3695
- this.handleSetSceneCode(message);
3696
- break;
3697
- case "debug-mode":
3698
- this.setDebugMode(message.data.enabled);
3699
- break;
3700
- case "parameter-update":
3701
- this.handleParameterUpdate(message);
3702
- break;
3703
- case "parameter-batch-update":
3704
- this.handleParameterBatchUpdate(message);
3705
- break;
3706
- case "stream-update":
3707
- this.handleStreamUpdate(message);
3708
- break;
3709
- case "audio-analysis-update":
3710
- this.handleAudioAnalysisUpdate(message);
3711
- break;
3712
- case "video-canvas-setup":
3713
- this.handleVideoCanvasSetup(message);
3714
- break;
3715
- case "video-frame-update":
3716
- this.handleVideoFrameUpdate(message);
3717
- break;
3718
- case "video-config-update":
3719
- this.handleVideoConfigUpdate(message);
3720
- break;
3721
- case "video-streams-prepare":
3722
- this.handleVideoStreamsPrepare(message);
3723
- break;
3724
- case "video-frame-direct":
3725
- this.handleVideoFrameDirect(message);
3726
- break;
3727
- case "enable-auto-capture":
3728
- this.autoCaptureEnabled = message.data.enabled;
3729
- if (message.data.flipY !== void 0) {
3730
- this.autoCaptureFormat.flipY = message.data.flipY;
3731
- this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"} with flipY=${message.data.flipY}`);
3732
- } else {
3733
- this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"}`);
3734
- }
3735
- break;
3736
- case "mouse-update":
3737
- this.handleMouseUpdate(message);
3738
- break;
3739
- case "keyboard-update":
3740
- this.handleKeyboardUpdate(message);
3741
- break;
3742
- case "touch-update":
3743
- this.handleTouchUpdate(message);
3744
- break;
3745
- case "interaction-enabled":
3746
- this.handleInteractionEnabled(message);
3747
- break;
3748
- case "device-state-update":
3749
- this.handleDeviceStateUpdate(message);
3750
- break;
3751
- case "performance-update":
3752
- this.handlePerformanceUpdate(message);
3753
- break;
3754
- case "capture-frame":
3755
- this.handleCaptureFrame(message);
3756
- break;
3757
- }
3758
- };
3759
- }
3760
  /**
   * Handles the host's 'init' message: adopts the transferred canvas,
   * mirrors its dimensions onto the viji API, starts the render loop,
   * and acknowledges with an 'init-response' carrying the request id.
   * Any failure is reported back with code INIT_ERROR.
   */
  handleInit(message) {
    try {
      this.canvas = message.data.canvas;
      this.viji.canvas = this.canvas;
      this.viji.width = this.canvas.width;
      this.viji.height = this.canvas.height;
      this.startRenderLoop();
      this.postMessage("init-response", {
        id: message.id
      });
    } catch (error) {
      this.postMessage("error", {
        id: message.id,
        message: error.message,
        code: "INIT_ERROR"
      });
    }
  }
3778
- handleFrameRateUpdate(message) {
3779
- if (message.data && message.data.mode) {
3780
- this.frameRateMode = message.data.mode;
3781
- this.debugLog("Frame rate mode updated to:", message.data.mode);
3782
- }
3783
- }
3784
- handleRefreshRateUpdate(message) {
3785
- if (message.data && message.data.screenRefreshRate) {
3786
- this.screenRefreshRate = message.data.screenRefreshRate;
3787
- this.debugLog("Screen refresh rate updated to:", message.data.screenRefreshRate + "Hz");
3788
- }
3789
- }
3790
  /**
   * Forwards a CV frame-rate mode change to the main video system
   * (index 0), attaching the scene's current processing FPS (full or half
   * of the screen refresh rate) so CV can pace itself relative to rendering.
   */
  handleCVFrameRateUpdate(message) {
    if (message.data && message.data.mode) {
      const sceneProcessingFPS = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
      // CV only runs on the main stream (index 0)
      if (this.videoSystems[0]) {
        this.videoSystems[0].handleVideoConfigUpdate({
          cvFrameRate: {
            mode: message.data.mode,
            sceneTargetFPS: sceneProcessingFPS
          },
          timestamp: performance.now()
        });
      }
      this.debugLog(`CV frame rate updated to: ${message.data.mode} of ${sceneProcessingFPS} FPS scene processing`);
    }
  }
3805
- trackEffectiveFrameTime(currentTime) {
3806
- this.effectiveFrameTimes.push(currentTime);
3807
- if (this.effectiveFrameTimes.length > 60) {
3808
- this.effectiveFrameTimes.shift();
3809
- }
3810
- }
3811
- reportPerformanceStats(currentTime) {
3812
- if (currentTime - this.lastEffectiveRateReport >= this.effectiveRateReportInterval) {
3813
- if (this.effectiveFrameTimes.length >= 2) {
3814
- const totalTime = this.effectiveFrameTimes[this.effectiveFrameTimes.length - 1] - this.effectiveFrameTimes[0];
3815
- const frameCount = this.effectiveFrameTimes.length - 1;
3816
- const effectiveRefreshRate = Math.round(frameCount / totalTime * 1e3);
3817
- const cvStats = this.videoSystems[0]?.getCVStats() || {
3818
- activeFeatures: [],
3819
- processingTime: 0,
3820
- actualFPS: 0,
3821
- isProcessing: false
3822
- };
3823
- this.postMessage("performance-update", {
3824
- effectiveRefreshRate,
3825
- frameRateMode: this.frameRateMode,
3826
- screenRefreshRate: this.screenRefreshRate,
3827
- rendererType: this.rendererType,
3828
- parameterCount: this.parameterSystem.getParameterCount(),
3829
- // Include CV stats if available
3830
- cv: cvStats ? {
3831
- activeFeatures: cvStats.activeFeatures,
3832
- processingTime: cvStats.processingTime,
3833
- targetFPS: cvStats.effectiveFPS,
3834
- actualFPS: cvStats.actualFPS,
3835
- isProcessing: cvStats.isProcessing
3836
- } : void 0
3837
- });
3838
- }
3839
- this.lastEffectiveRateReport = currentTime;
3840
- }
3841
- }
3842
  /**
   * Applies a host-driven resolution change: resizes the canvas and the
   * viji dimensions, updates the WebGL viewport if a GL context exists,
   * and notifies the active renderer adapter (P5 or shader) to resize.
   */
  handleResolutionUpdate(message) {
    if (message.data) {
      if (this.canvas) {
        this.canvas.width = Math.round(message.data.effectiveWidth);
        this.canvas.height = Math.round(message.data.effectiveHeight);
      }
      this.viji.width = Math.round(message.data.effectiveWidth);
      this.viji.height = Math.round(message.data.effectiveHeight);
      if (this.gl) {
        this.gl.viewport(0, 0, this.viji.width, this.viji.height);
      }
      // Only the adapter matching the active renderer type is resized
      if (this.p5Adapter && this.rendererType === "p5") {
        this.p5Adapter.resize(this.viji.width, this.viji.height);
      }
      if (this.shaderAdapter && this.rendererType === "shader") {
        this.shaderAdapter.resize(this.viji.width, this.viji.height);
      }
      this.debugLog("Canvas resolution updated to:", this.viji.width + "x" + this.viji.height);
    }
  }
3862
  /**
   * Applies a single parameter value change from the host.
   * Requires both name and value to be present (explicit undefined is rejected).
   */
  handleParameterUpdate(message) {
    if (message.data && message.data.name !== void 0 && message.data.value !== void 0) {
      this.parameterSystem.updateParameterValue(message.data.name, message.data.value);
    }
  }
  /**
   * Applies a batch of parameter updates (used for initial host→worker
   * sync), then marks initial values as synced on the parameter system.
   */
  handleParameterBatchUpdate(message) {
    if (message.data && message.data.updates) {
      for (const update of message.data.updates) {
        this.parameterSystem.updateParameterValue(update.name, update.value);
      }
      this.parameterSystem.markInitialValuesSynced();
      this.debugLog("Parameter system initialized successfully");
    }
  }
  // Currently log-only; stream lifecycle is handled via the video-* messages.
  handleStreamUpdate(message) {
    this.debugLog("Stream update:", message.data);
  }
3879
- handleAudioAnalysisUpdate(message) {
3880
- const events = message.data.beat.events || [];
3881
- const triggers = {
3882
- kick: events.some((e) => e.type === "kick"),
3883
- snare: events.some((e) => e.type === "snare"),
3884
- hat: events.some((e) => e.type === "hat"),
3885
- any: events.length > 0
3886
- };
3887
- this.audioState = {
3888
- isConnected: message.data.isConnected,
3889
- volume: message.data.volume,
3890
- bands: message.data.bands,
3891
- beat: {
3892
- ...message.data.beat,
3893
- triggers
3894
- // Add derived triggers (reliable!)
3895
- },
3896
- spectral: message.data.spectral,
3897
- frequencyData: new Uint8Array(message.data.frequencyData)
3898
- };
3899
- this.viji.audio = {
3900
- ...this.audioState,
3901
- getFrequencyData: () => this.audioState.frequencyData
3902
- };
3903
- }
3904
  /**
   * Creates and registers a VideoSystem for one stream slot, then exposes
   * its API at the right place depending on stream type:
   * - 'main'       → merged into viji.video (index 0 by convention)
   * - 'additional' → appears in viji.streams
   * - 'device'     → attached to the matching entry in viji.devices and
   *                  recorded in deviceVideoMap for O(1) lookup
   */
  handleVideoCanvasSetup(message) {
    const { streamIndex, streamType, deviceId } = message.data;
    const index = streamIndex || 0;
    const videoSystem = new VideoSystem();
    videoSystem.setDebugMode(this.debugMode);
    videoSystem.setStreamType(streamType, deviceId);
    videoSystem.handleCanvasSetup({
      offscreenCanvas: message.data.offscreenCanvas,
      width: message.data.width,
      height: message.data.height,
      timestamp: message.data.timestamp
    });
    this.videoSystems[index] = videoSystem;
    this.debugLog(`Video system setup at index ${index}, type: ${streamType}${deviceId ? `, deviceId: ${deviceId}` : ""}`);
    switch (streamType) {
      case "main":
        Object.assign(this.viji.video, videoSystem.getVideoAPI());
        break;
      case "additional":
        this.updateVijiStreams();
        break;
      case "device":
        if (deviceId) {
          this.deviceVideoMap.set(deviceId, index);
          const device = this.viji.devices.find((d) => d.id === deviceId);
          if (device) {
            // Merge into an existing video object to preserve identity for
            // artist code already holding a reference to it
            if (device.video) {
              Object.assign(device.video, videoSystem.getVideoAPI());
            } else {
              device.video = videoSystem.getVideoAPI();
            }
          }
        }
        break;
    }
  }
3940
  /**
   * Delivers an ImageBitmap frame to the VideoSystem at its stream index
   * and refreshes the corresponding facade (viji.video for 'main', the
   * device's video object for 'device'). Silently ignored when no
   * VideoSystem exists at that index.
   */
  handleVideoFrameUpdate(message) {
    const index = message.data.streamIndex || 0;
    const vs = this.videoSystems[index];
    if (vs) {
      vs.handleFrameUpdate({
        imageBitmap: message.data.imageBitmap,
        timestamp: message.data.timestamp
      });
      const streamType = vs.getStreamType();
      if (streamType === "main") {
        Object.assign(this.viji.video, vs.getVideoAPI());
      } else if (streamType === "device") {
        const deviceId = vs.getDeviceId();
        if (deviceId) {
          const device = this.viji.devices.find((d) => d.id === deviceId);
          if (device) {
            // Merge to preserve object identity where possible
            if (device.video) {
              Object.assign(device.video, vs.getVideoAPI());
            } else {
              device.video = vs.getVideoAPI();
            }
          }
        }
      }
    }
  }
3966
  /**
   * Forwards a config change (frame rate, CV config, dimensions, or
   * disconnect) to the VideoSystem at the given stream index.
   * On device disconnect the deviceId→index mapping and the device's
   * video facade are cleared; otherwise the relevant facade is refreshed.
   * Note: only truthy fields are forwarded (falsy width/height are dropped).
   */
  handleVideoConfigUpdate(message) {
    const index = message.data.streamIndex || 0;
    const vs = this.videoSystems[index];
    if (vs) {
      const streamType = vs.getStreamType();
      const deviceId = vs.getDeviceId();
      if (message.data.disconnect && streamType === "device" && deviceId) {
        this.deviceVideoMap.delete(deviceId);
        const device = this.viji.devices.find((d) => d.id === deviceId);
        if (device) {
          device.video = null;
        }
      }
      // Conditional spreads: only include the fields present in the message
      vs.handleVideoConfigUpdate({
        ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
        ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
        ...message.data.width && { width: message.data.width },
        ...message.data.height && { height: message.data.height },
        ...message.data.disconnect && { disconnect: message.data.disconnect },
        timestamp: message.data.timestamp
      });
      if (!message.data.disconnect) {
        if (streamType === "main") {
          Object.assign(this.viji.video, vs.getVideoAPI());
        } else if (streamType === "device" && deviceId) {
          const device = this.viji.devices.find((d) => d.id === deviceId);
          if (device) {
            if (device.video) {
              Object.assign(device.video, vs.getVideoAPI());
            } else {
              device.video = vs.getVideoAPI();
            }
          }
        }
      }
    }
  }
4003
  /**
   * Pre-allocates VideoSystem slots for an upcoming set of streams:
   * [main?] + media streams + direct-frame streams, in that index order.
   * Direct-frame systems are (re)initialized for the current renderer type.
   */
  handleVideoStreamsPrepare(message) {
    const { mainStream, mediaStreamCount, directFrameCount } = message.data;
    const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
    this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
    // Grow the array so every expected index has a system
    while (this.videoSystems.length < totalStreams) {
      this.videoSystems.push(new VideoSystem());
    }
    // Direct-frame streams occupy the tail of the index range
    const directFrameStartIndex = (mainStream ? 1 : 0) + mediaStreamCount;
    for (let i = 0; i < directFrameCount; i++) {
      const index = directFrameStartIndex + i;
      if (!this.videoSystems[index]) {
        this.videoSystems[index] = new VideoSystem();
      }
      this.videoSystems[index].setDebugMode(this.debugMode);
      this.videoSystems[index].initializeForDirectFrames(this.rendererType);
    }
    this.updateVijiStreams();
    this.debugLog(`Prepared ${totalStreams} video systems (${directFrameCount} direct frames)`);
  }
4022
- handleVideoFrameDirect(message) {
4023
- const index = message.data.streamIndex || 0;
4024
- if (!this.videoSystems[index]) {
4025
- this.debugLog(`[Compositor] Creating new VideoSystem at index ${index} for direct frames`);
4026
- this.videoSystems[index] = new VideoSystem();
4027
- this.videoSystems[index].setDebugMode(this.debugMode);
4028
- this.videoSystems[index].initializeForDirectFrames(this.rendererType);
4029
- }
4030
- this.videoSystems[index].handleDirectFrame(message.data);
4031
- }
4032
  // Log-only: host-side performance info has no worker-side effect.
  handlePerformanceUpdate(message) {
    this.debugLog("Performance update:", message.data);
  }
  /**
   * Handle capture-frame request from host.
   * Defers capture to immediately after the next render completes to avoid race conditions.
   * (The queue is drained by renderFrame, which calls executeCaptureFrame.)
   */
  async handleCaptureFrame(message) {
    this.pendingCaptures.push(message);
    this.debugLog(`Capture queued: ${message.data.format || "blob"} (${this.pendingCaptures.length} pending)`);
  }
4043
  /**
   * Execute a capture frame request immediately after render completes.
   * This ensures we capture a fully rendered frame, avoiding race conditions.
   *
   * Formats:
   * - 'bitmap': transfers the canvas content as an ImageBitmap (zero-copy).
   * - default ('blob'): encodes to a Blob (default image/jpeg), with optional
   *   rescaling (numeric scale factor or explicit {width,height}) and a
   *   center-crop when the target aspect ratio differs from the source.
   * For WebGL canvases, pixels are read back with readPixels and flipped
   * vertically (GL rows are bottom-up) before encoding.
   * Failures are reported to the host with code CAPTURE_FRAME_ERROR.
   */
  async executeCaptureFrame(message) {
    try {
      if (!this.canvas) {
        throw new Error("Canvas not initialized");
      }
      const format = message.data.format || "blob";
      if (format === "bitmap") {
        const bitmap = this.canvas.transferToImageBitmap();
        // Transfer list: hand ownership of the bitmap to the host
        self.postMessage({
          type: "capture-frame-result",
          id: message.id,
          timestamp: Date.now(),
          data: { bitmap }
        }, [bitmap]);
        return;
      }
      const mimeType = message.data.type || "image/jpeg";
      const srcWidth = this.canvas.width;
      const srcHeight = this.canvas.height;
      let targetWidth = srcWidth;
      let targetHeight = srcHeight;
      if (typeof message.data.resolution === "number") {
        // Numeric resolution is a scale factor; non-positive values mean 1:1
        const scale = message.data.resolution > 0 ? message.data.resolution : 1;
        targetWidth = Math.max(1, Math.floor(srcWidth * scale));
        targetHeight = Math.max(1, Math.floor(srcHeight * scale));
      } else if (message.data.resolution && typeof message.data.resolution === "object") {
        targetWidth = Math.max(1, Math.floor(message.data.resolution.width));
        targetHeight = Math.max(1, Math.floor(message.data.resolution.height));
      }
      // Center-crop the source when aspect ratios differ
      const srcAspect = srcWidth / srcHeight;
      const dstAspect = targetWidth / targetHeight;
      let sx = 0;
      let sy = 0;
      let sWidth = srcWidth;
      let sHeight = srcHeight;
      if (Math.abs(srcAspect - dstAspect) > 1e-6) {
        if (dstAspect > srcAspect) {
          sHeight = Math.floor(srcWidth / dstAspect);
          sy = Math.floor((srcHeight - sHeight) / 2);
        } else {
          sWidth = Math.floor(srcHeight * dstAspect);
          sx = Math.floor((srcWidth - sWidth) / 2);
        }
      }
      let sourceCanvas;
      const gl2 = this.canvas.getContext("webgl2");
      const gl = gl2 || this.canvas.getContext("webgl");
      if (gl) {
        // Make sure all GL commands have completed before reading back
        gl.finish();
        const pixels = new Uint8Array(srcWidth * srcHeight * 4);
        gl.readPixels(0, 0, srcWidth, srcHeight, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
        // GL readback is bottom-up; flip rows into top-down order
        const flippedPixels = new Uint8ClampedArray(srcWidth * srcHeight * 4);
        for (let y = 0; y < srcHeight; y++) {
          const srcRow = (srcHeight - 1 - y) * srcWidth * 4;
          const dstRow = y * srcWidth * 4;
          flippedPixels.set(pixels.subarray(srcRow, srcRow + srcWidth * 4), dstRow);
        }
        sourceCanvas = new OffscreenCanvas(srcWidth, srcHeight);
        const sourceCtx = sourceCanvas.getContext("2d");
        if (!sourceCtx) throw new Error("Failed to create source context");
        const imageData = new ImageData(flippedPixels, srcWidth, srcHeight);
        sourceCtx.putImageData(imageData, 0, 0);
        this.debugLog("Captured frame from WebGL using readPixels (post-render)");
      } else {
        // 2D canvas can be drawn from directly
        sourceCanvas = this.canvas;
        this.debugLog("Captured frame from 2D canvas (post-render)");
      }
      const temp = new OffscreenCanvas(targetWidth, targetHeight);
      const tctx = temp.getContext("2d");
      if (!tctx) throw new Error("Failed to get 2D context");
      tctx.drawImage(sourceCanvas, sx, sy, sWidth, sHeight, 0, 0, targetWidth, targetHeight);
      const blob = await temp.convertToBlob({ type: mimeType });
      self.postMessage({
        type: "capture-frame-result",
        id: message.id,
        timestamp: Date.now(),
        data: { blob }
      });
    } catch (error) {
      this.postMessage("error", {
        id: message.id,
        message: error.message,
        code: "CAPTURE_FRAME_ERROR"
      });
    }
  }
4133
  /**
   * Loads new scene code via the module-level setSceneCode (exposed on
   * the worker global); payloads without sceneCode are ignored.
   */
  handleSetSceneCode(message) {
    if (message.data && message.data.sceneCode) {
      self.setSceneCode(message.data.sceneCode);
    }
  }
  /**
   * Starts the requestAnimationFrame-driven render loop and resets the
   * timing baseline (startTime/lastTime).
   */
  startRenderLoop() {
    this.isRunning = true;
    this.startTime = performance.now();
    this.lastTime = this.startTime;
    this.renderFrame();
  }
4144
  /**
   * Main render loop tick (driven by requestAnimationFrame).
   * Order matters: refresh stream facades → maybe skip (half-rate mode) →
   * update timing → dispatch to the active renderer → drain queued frame
   * captures → optional auto-capture → report stats → schedule next tick.
   * Render errors are reported to the host but never stop the loop.
   */
  renderFrame() {
    if (!this.isRunning) return;
    const currentTime = performance.now();
    this.updateVijiStreams();
    this.viji.fps = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
    let shouldRender = true;
    if (this.frameRateMode === "half") {
      // Render every other rAF tick
      shouldRender = !this.skipNextFrame;
      this.skipNextFrame = !this.skipNextFrame;
    }
    if (shouldRender) {
      this.viji.deltaTime = (currentTime - this.lastTime) / 1e3;
      this.viji.time = (currentTime - this.startTime) / 1e3;
      this.viji.frameCount = ++this.frameCount;
      this.trackEffectiveFrameTime(currentTime);
      this.lastTime = currentTime;
      try {
        // Renderer dispatch: shader adapter, then P5 adapter, else the
        // native render function installed by setSceneCode
        if (this.shaderAdapter && this.rendererType === "shader") {
          const parameterObjects = this.parameterSystem.getAllParameterObjects();
          this.shaderAdapter.render(this.viji, parameterObjects);
        } else if (this.p5Adapter && this.rendererType === "p5") {
          const parameterObjects = this.parameterSystem.getAllParameterObjects();
          this.p5Adapter.tick(this.viji, parameterObjects);
        } else {
          const renderFunction2 = self.renderFunction;
          if (renderFunction2 && typeof renderFunction2 === "function") {
            renderFunction2(this.viji);
          }
        }
      } catch (error) {
        console.error("Render error:", error);
        this.postMessage("error", {
          message: error.message,
          code: "RENDER_ERROR",
          stack: error.stack
        });
      }
      // Drain queued capture requests now that this frame is fully rendered
      if (this.pendingCaptures.length > 0) {
        const captures = [...this.pendingCaptures];
        this.pendingCaptures = [];
        for (const captureMsg of captures) {
          // Fire-and-forget: capture failures are logged, never block the loop
          this.executeCaptureFrame(captureMsg).catch((error) => {
            console.error("Capture execution error:", error);
          });
        }
      }
      if (this.autoCaptureEnabled && this.canvas) {
        try {
          const ctx = this.canvas.getContext("2d") || this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
          if (ctx) {
            const options = this.autoCaptureFormat.flipY ? { imageOrientation: "flipY" } : {};
            createImageBitmap(this.canvas, options).then((bitmap) => {
              // Transfer the bitmap to the host (zero-copy)
              self.postMessage({
                type: "auto-capture-result",
                timestamp: Date.now(),
                data: { bitmap, timestamp: performance.now() }
              }, [bitmap]);
            }).catch((err) => {
              console.warn("[AutoCapture] ImageBitmap creation failed:", err);
            });
          } else {
            // Throttle the "no context" log to roughly once per 60 frames
            if (this.debugMode && this.frameCount % 60 === 0) {
              this.debugLog("[AutoCapture] No context yet, skipping capture");
            }
          }
        } catch (error) {
          console.warn("[AutoCapture] Failed:", error);
        }
      }
    }
    this.reportPerformanceStats(currentTime);
    // Let the interaction system clear per-frame (edge-triggered) state
    this.interactionSystem.frameStart();
    requestAnimationFrame(() => this.renderFrame());
  }
4218
- postMessage(type, data) {
4219
- self.postMessage({
4220
- type,
4221
- id: data?.id || `${type}_${Date.now()}`,
4222
- timestamp: Date.now(),
4223
- data
4224
- });
4225
- }
4226
  // Phase 7: Interaction Message Handlers (delegated to InteractionSystem)
  // Each handler simply unwraps message.data and forwards it.
  handleMouseUpdate(message) {
    this.interactionSystem.handleMouseUpdate(message.data);
  }
  handleKeyboardUpdate(message) {
    this.interactionSystem.handleKeyboardUpdate(message.data);
  }
  handleTouchUpdate(message) {
    this.interactionSystem.handleTouchUpdate(message.data);
  }
  handleInteractionEnabled(message) {
    this.interactionSystem.setInteractionEnabled(message.data.enabled);
  }
4239
  /**
   * Ingests a device sensor snapshot from the host. Existing viji.devices
   * entries are updated in place (preserving object identity, including an
   * already-attached video facade); new devices get a fresh entry with a
   * video facade attached via the deviceVideoMap lookup when one exists.
   */
  handleDeviceStateUpdate(message) {
    this.deviceState = message.data;
    this.viji.device = this.deviceState.device;
    const updatedDevices = this.deviceState.devices.map((deviceData) => {
      const existingDevice = this.viji.devices.find((d) => d.id === deviceData.id);
      if (existingDevice) {
        // Mutate in place to keep references held by artist code valid
        existingDevice.name = deviceData.name;
        existingDevice.motion = deviceData.motion;
        existingDevice.orientation = deviceData.orientation;
        return existingDevice;
      } else {
        const streamIndex = this.deviceVideoMap.get(deviceData.id);
        const videoSystem = streamIndex !== void 0 ? this.videoSystems[streamIndex] : void 0;
        return {
          ...deviceData,
          video: videoSystem ? videoSystem.getVideoAPI() : null
        };
      }
    });
    // Devices no longer reported by the host are dropped here
    this.viji.devices = updatedDevices;
  }
4260
- }
4261
class SceneAnalyzer {
  /**
   * Detects the renderer type from scene code comments
   *
   * Looks for:
   * - // @renderer shader
   * - /* @renderer shader *\/
   * - // @renderer p5
   * - /* @renderer p5 *\/
   *
   * Shader markers take precedence over p5 markers; with neither present
   * the scene is treated as native.
   *
   * @param sceneCode - The artist's scene code to analyze
   * @returns The detected renderer type ('shader', 'p5', or 'native')
   */
  static detectRendererType(sceneCode) {
    // Ordered marker table: first pattern that matches wins.
    const markers = [
      ["shader", /\/\/\s*@renderer\s+shader|\/\*\s*@renderer\s+shader\s*\*\//],
      ["p5", /\/\/\s*@renderer\s+p5|\/\*\s*@renderer\s+p5\s*\*\//]
    ];
    for (const [rendererType, pattern] of markers) {
      if (pattern.test(sceneCode)) {
        return rendererType;
      }
    }
    return "native";
  }
}
4284
// Singleton worker runtime, constructed at module load.
const runtime = new VijiWorkerRuntime();
// Active native-mode render callback; null until a scene is loaded.
let renderFunction = null;
4286
/**
 * Compiles and activates artist scene code.
 * Resets runtime state, detects the renderer via SceneAnalyzer, then:
 * - shader: passes the raw source to the shader adapter
 * - p5: evaluates the code to extract setup/render functions
 * - native: evaluates the code and installs its render function
 * In every successful path, the declared parameters are re-sent to the host.
 * Errors are reported to the host with code SCENE_CODE_ERROR.
 *
 * NOTE: uses Function/AsyncFunction constructors to evaluate artist-supplied
 * code — intentional here (this worker exists to run untrusted scene code in
 * isolation), but any sandboxing guarantees live at the worker boundary.
 */
async function setSceneCode(sceneCode) {
  try {
    runtime.resetParameterState();
    const rendererType = SceneAnalyzer.detectRendererType(sceneCode);
    if (rendererType === "shader") {
      await runtime.initShaderMode(sceneCode);
      runtime.sendAllParametersToHost();
    } else if (rendererType === "p5") {
      // Appended epilogue returns whichever of setup/render the code defined
      const functionBody = sceneCode + '\nreturn { setup: typeof setup !== "undefined" ? setup : null, render: typeof render !== "undefined" ? render : null };';
      const sceneFunction = new Function("viji", "p5", functionBody);
      const { setup, render } = sceneFunction(runtime.viji, null);
      if (!render) {
        throw new Error("P5 mode requires a render(viji, p5) function");
      }
      await runtime.initP5Mode(setup, render);
      runtime.sendAllParametersToHost();
    } else {
      const functionBody = sceneCode + '\nif (typeof render === "function") {\n return render;\n}\nthrow new Error("Scene code must define a render function");';
      // AsyncFunction constructor (not exposed globally) lets scene code use top-level await
      const AsyncFunction = Object.getPrototypeOf(async function() {
      }).constructor;
      const sceneFunction = new AsyncFunction("viji", functionBody);
      renderFunction = await sceneFunction(runtime.viji);
      // Published on the global so the render loop picks it up each frame
      self.renderFunction = renderFunction;
      runtime.sendAllParametersToHost();
    }
  } catch (error) {
    console.error("Failed to load scene code:", error);
    self.postMessage({
      type: "error",
      id: `scene_error_${Date.now()}`,
      timestamp: Date.now(),
      data: {
        message: `Scene code error: ${error.message}`,
        code: "SCENE_CODE_ERROR"
      }
    });
  }
}
4324
// Expose scene loading on the worker global so the message handler
// (handleSetSceneCode) can invoke it.
self.setSceneCode = setSceneCode;
//# sourceMappingURL=viji.worker-BnDb6mPh.js.map