@viji-dev/core 0.2.19 → 0.3.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -1241,6 +1241,8 @@ class VideoSystem {
  cvScratchContext = null;
  // Debug logging control
  debugMode = false;
+ // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
+ directGPUMode = false;
  /**
  * Enable or disable debug logging
  */
@@ -1286,9 +1288,13 @@ class VideoSystem {
  */
  getVideoAPI() {
  const cvResults = this.cvSystem.getResults();
+ const currentFrame = this.videoState.currentFrame;
+ if (this.directGPUMode && currentFrame instanceof ImageBitmap) {
+ this.videoState.currentFrame = null;
+ }
  return {
  isConnected: this.videoState.isConnected,
- currentFrame: this.videoState.currentFrame,
+ currentFrame,
  frameWidth: this.videoState.frameWidth,
  frameHeight: this.videoState.frameHeight,
  frameRate: this.videoState.frameRate,
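
Note: in direct GPU mode getVideoAPI() hands out each ImageBitmap exactly once and nulls the stored frame, so the consumer takes ownership (and later closes it). A minimal sketch of that consume-once contract, assuming vs is a VideoSystem in directGPUMode that has received one direct frame:

    const first = vs.getVideoAPI().currentFrame;  // the injected ImageBitmap
    const second = vs.getVideoAPI().currentFrame; // null until the next frame arrives
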
@@ -1526,6 +1532,54 @@ class VideoSystem {
  getCVStats() {
  return this.cvSystem.getStats();
  }
+ /**
+ * Initialize for direct frame injection (no MediaStream)
+ * Enables GPU-only mode for zero-copy pipeline (ImageBitmaps are pre-flipped at capture)
+ */
+ initializeForDirectFrames(consumerRendererType) {
+ this.disconnectVideo();
+ this.directGPUMode = consumerRendererType === "shader";
+ if (this.directGPUMode) {
+ this.debugLog("VideoSystem initialized in direct GPU mode (zero-copy, pre-flipped ImageBitmaps)");
+ } else {
+ this.offscreenCanvas = new OffscreenCanvas(1920, 1080);
+ this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+ if (!this.ctx) {
+ throw new Error("Failed to get 2D context for direct frames");
+ }
+ this.videoState.currentFrame = this.offscreenCanvas;
+ this.debugLog("VideoSystem initialized with canvas (P5/Canvas consumer)");
+ }
+ this.videoState.isConnected = false;
+ }
+ /**
+ * Handle directly injected frame (zero-copy)
+ */
+ handleDirectFrame(data) {
+ if (this.directGPUMode) {
+ this.videoState.currentFrame = data.imageBitmap;
+ this.videoState.frameWidth = data.imageBitmap.width;
+ this.videoState.frameHeight = data.imageBitmap.height;
+ this.videoState.isConnected = true;
+ } else {
+ if (!this.offscreenCanvas || !this.ctx) {
+ this.debugLog("[Compositor] Initializing canvas for direct frames");
+ this.offscreenCanvas = new OffscreenCanvas(data.imageBitmap.width, data.imageBitmap.height);
+ this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+ }
+ if (this.offscreenCanvas.width !== data.imageBitmap.width || this.offscreenCanvas.height !== data.imageBitmap.height) {
+ this.offscreenCanvas.width = data.imageBitmap.width;
+ this.offscreenCanvas.height = data.imageBitmap.height;
+ }
+ this.ctx.drawImage(data.imageBitmap, 0, 0);
+ this.videoState.currentFrame = this.offscreenCanvas;
+ this.videoState.frameWidth = data.imageBitmap.width;
+ this.videoState.frameHeight = data.imageBitmap.height;
+ this.videoState.isConnected = true;
+ this.processCurrentFrame(data.timestamp);
+ data.imageBitmap.close();
+ }
+ }
  }
  class P5WorkerAdapter {
  constructor(offscreenCanvas, _vijiAPI, sceneCode) {
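
As a usage sketch (the producer below is illustrative, not part of the package): a host-side frame source initializes a VideoSystem once for a shader consumer, then injects pre-flipped ImageBitmaps per frame.

    const vs = new VideoSystem();
    vs.initializeForDirectFrames("shader"); // enables zero-copy directGPUMode
    async function pushFrame(sourceCanvas) {
      // Pre-flip at capture so the GPU upload can skip UNPACK_FLIP_Y_WEBGL
      const imageBitmap = await createImageBitmap(sourceCanvas, { imageOrientation: "flipY" });
      vs.handleDirectFrame({ imageBitmap, timestamp: performance.now() });
    }
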
@@ -1770,6 +1824,49 @@ class P5WorkerAdapter {
  return null;
  }
  }
+ /**
+ * Wrap video frames in P5.js-compatible format
+ * P5.js expects images to have {canvas, elt, width, height} structure
+ * This wrapping is done per-frame for P5 scenes only, keeping the artist API unchanged
+ */
+ wrapVideoFramesForP5(vijiAPI) {
+ if (vijiAPI.video?.currentFrame instanceof OffscreenCanvas) {
+ const canvas = vijiAPI.video.currentFrame;
+ vijiAPI.video.currentFrame = {
+ canvas,
+ elt: canvas,
+ width: canvas.width,
+ height: canvas.height
+ };
+ } else if (vijiAPI.video?.currentFrame instanceof ImageBitmap) {
+ const bitmap = vijiAPI.video.currentFrame;
+ vijiAPI.video.currentFrame = {
+ elt: bitmap,
+ width: bitmap.width,
+ height: bitmap.height
+ };
+ }
+ if (Array.isArray(vijiAPI.streams)) {
+ for (const stream of vijiAPI.streams) {
+ if (stream?.currentFrame instanceof OffscreenCanvas) {
+ const canvas = stream.currentFrame;
+ stream.currentFrame = {
+ canvas,
+ elt: canvas,
+ width: canvas.width,
+ height: canvas.height
+ };
+ } else if (stream?.currentFrame instanceof ImageBitmap) {
+ const bitmap = stream.currentFrame;
+ stream.currentFrame = {
+ elt: bitmap,
+ width: bitmap.width,
+ height: bitmap.height
+ };
+ }
+ }
+ }
+ }
  /**
  * Add .p5 property to image parameters for P5.js-specific rendering
  * This allows artists to use p5.image() while keeping .value for native canvas API
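
For P5 scenes the practical effect is that viji.video.currentFrame can be passed straight to p5's drawing API. A sketch (assuming the runtime passes the p5 instance as the second render argument, as the adapter's setupFn call suggests):

    function render(viji, p) {
      if (viji.video.isConnected && viji.video.currentFrame) {
        // currentFrame now carries { elt, width, height } as p5.js expects
        p.image(viji.video.currentFrame, 0, 0, p.width, p.height);
      }
    }
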
@@ -1810,6 +1907,7 @@ class P5WorkerAdapter {
  }
  try {
  this.addP5PropertyToImageParameters(parameterObjects);
+ this.wrapVideoFramesForP5(vijiAPI);
  if (!this.artistSetupComplete && this.setupFn) {
  this.setupFn(vijiAPI, this.p5Instance);
  this.artistSetupComplete = true;
@@ -2032,6 +2130,8 @@ class ShaderWorkerAdapter {
  this.gl = gl;
  }
  }
+ static MAX_STREAMS = 8;
+ // Maximum number of compositor input streams
  gl;
  program = null;
  uniformLocations = /* @__PURE__ */ new Map();
@@ -2048,6 +2148,8 @@ class ShaderWorkerAdapter {
  audioFFTTexture = null;
  videoTexture = null;
  segmentationTexture = null;
+ // Multi-stream textures
+ streamTextures = [];
  // Backbuffer support (ping-pong framebuffers)
  backbufferFramebuffer = null;
  backbufferTexture = null;
@@ -2271,6 +2373,33 @@ uniform sampler2D u_video; // Current video frame as texture
  uniform vec2 u_videoResolution; // Video frame width and height in pixels
  uniform float u_videoFrameRate; // Video frame rate in frames per second

+ // Multi-Stream Compositor Support (using individual uniforms due to WebGL 1.0 limitations)
+ uniform int u_streamCount; // Number of available compositor input streams (0-8)
+ uniform sampler2D u_stream0; // Stream 0 texture
+ uniform sampler2D u_stream1; // Stream 1 texture
+ uniform sampler2D u_stream2; // Stream 2 texture
+ uniform sampler2D u_stream3; // Stream 3 texture
+ uniform sampler2D u_stream4; // Stream 4 texture
+ uniform sampler2D u_stream5; // Stream 5 texture
+ uniform sampler2D u_stream6; // Stream 6 texture
+ uniform sampler2D u_stream7; // Stream 7 texture
+ uniform vec2 u_stream0Resolution; // Stream 0 resolution
+ uniform vec2 u_stream1Resolution; // Stream 1 resolution
+ uniform vec2 u_stream2Resolution; // Stream 2 resolution
+ uniform vec2 u_stream3Resolution; // Stream 3 resolution
+ uniform vec2 u_stream4Resolution; // Stream 4 resolution
+ uniform vec2 u_stream5Resolution; // Stream 5 resolution
+ uniform vec2 u_stream6Resolution; // Stream 6 resolution
+ uniform vec2 u_stream7Resolution; // Stream 7 resolution
+ uniform bool u_stream0Connected; // Stream 0 connection status
+ uniform bool u_stream1Connected; // Stream 1 connection status
+ uniform bool u_stream2Connected; // Stream 2 connection status
+ uniform bool u_stream3Connected; // Stream 3 connection status
+ uniform bool u_stream4Connected; // Stream 4 connection status
+ uniform bool u_stream5Connected; // Stream 5 connection status
+ uniform bool u_stream6Connected; // Stream 6 connection status
+ uniform bool u_stream7Connected; // Stream 7 connection status
+
  // CV - Face Detection
  uniform int u_faceCount; // Number of detected faces (0-1)
  uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
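
WebGL 1.0 GLSL cannot index a sampler array with a dynamic index, hence one uniform per stream. A fragment-shader sketch consuming them (v_uv is a hypothetical varying name, not taken from this file):

    const exampleFragment = `
      vec3 blendTwoStreams() {
        vec3 a = u_stream0Connected ? texture2D(u_stream0, v_uv).rgb : vec3(0.0);
        vec3 b = u_stream1Connected ? texture2D(u_stream1, v_uv).rgb : vec3(0.0);
        return u_streamCount > 1 ? mix(a, b, 0.5) : a;
      }
    `;
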
@@ -2364,13 +2493,22 @@ ${error}`);
  if (!this.program) return;
  const gl = this.gl;
  const numUniforms = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
+ const streamUniforms = [];
  for (let i = 0; i < numUniforms; i++) {
  const info = gl.getActiveUniform(this.program, i);
  if (info) {
  const location = gl.getUniformLocation(this.program, info.name);
  this.uniformLocations.set(info.name, location);
+ if (info.name.includes("stream") || info.name.includes("u_stream")) {
+ streamUniforms.push(info.name);
+ }
  }
  }
+ if (streamUniforms.length > 0) {
+ console.log("[ShaderAdapter] Stream-related uniforms found:", streamUniforms);
+ } else {
+ console.log("[ShaderAdapter] NO stream-related uniforms found in shader!");
+ }
  }
  /**
  * Reserve texture units for special textures
@@ -2379,6 +2517,9 @@ ${error}`);
  this.textureUnits.set("u_audioFFT", this.nextTextureUnit++);
  this.textureUnits.set("u_video", this.nextTextureUnit++);
  this.textureUnits.set("u_segmentationMask", this.nextTextureUnit++);
+ for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+ this.textureUnits.set(`u_stream${i}`, this.nextTextureUnit++);
+ }
  if (this.backbufferEnabled) {
  this.textureUnits.set("backbuffer", this.nextTextureUnit++);
  }
@@ -2490,16 +2631,16 @@ ${error}`);
  }
  }
  const audio = viji.audio;
- this.setUniform("u_audioVolume", "float", audio.volume?.rms || 0);
+ this.setUniform("u_audioVolume", "float", audio.volume?.current || 0);
  this.setUniform("u_audioPeak", "float", audio.volume?.peak || 0);
- this.setUniform("u_audioBass", "float", audio.bands?.bass || 0);
+ this.setUniform("u_audioBass", "float", audio.bands?.low || 0);
  this.setUniform("u_audioMid", "float", audio.bands?.mid || 0);
- this.setUniform("u_audioTreble", "float", audio.bands?.treble || 0);
- this.setUniform("u_audioSubBass", "float", audio.bands?.subBass || 0);
+ this.setUniform("u_audioTreble", "float", audio.bands?.high || 0);
+ this.setUniform("u_audioSubBass", "float", audio.bands?.low || 0);
  this.setUniform("u_audioLowMid", "float", audio.bands?.lowMid || 0);
  this.setUniform("u_audioHighMid", "float", audio.bands?.highMid || 0);
- this.setUniform("u_audioPresence", "float", audio.bands?.presence || 0);
- this.setUniform("u_audioBrilliance", "float", audio.bands?.brilliance || 0);
+ this.setUniform("u_audioPresence", "float", audio.bands?.highMid || 0);
+ this.setUniform("u_audioBrilliance", "float", audio.bands?.high || 0);
  if (audio.isConnected) {
  this.updateAudioFFTTexture(audio.getFrequencyData());
  }
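
The renames keep the existing GLSL uniform names while sourcing them from the new band model; the mapping, read directly from this hunk:

    // old field        -> new source
    // volume.rms       -> volume.current
    // bands.bass       -> bands.low
    // bands.treble     -> bands.high
    // bands.subBass    -> bands.low      (nearest remaining band)
    // bands.presence   -> bands.highMid  (nearest remaining band)
    // bands.brilliance -> bands.high     (nearest remaining band)
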
@@ -2512,6 +2653,25 @@ ${error}`);
  this.setUniform("u_videoResolution", "vec2", [0, 0]);
  this.setUniform("u_videoFrameRate", "float", 0);
  }
+ const streams = viji.streams || [];
+ const streamCount = Math.min(streams.length, ShaderWorkerAdapter.MAX_STREAMS);
+ this.setUniform("u_streamCount", "int", streamCount);
+ for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+ const connectedUniform = `u_stream${i}Connected`;
+ const resolutionUniform = `u_stream${i}Resolution`;
+ if (i < streamCount && streams[i]?.isConnected && streams[i]?.currentFrame) {
+ this.updateStreamTexture(i, streams[i].currentFrame);
+ this.setUniform(
+ resolutionUniform,
+ "vec2",
+ [streams[i].frameWidth, streams[i].frameHeight]
+ );
+ this.setUniform(connectedUniform, "bool", true);
+ } else {
+ this.setUniform(resolutionUniform, "vec2", [0, 0]);
+ this.setUniform(connectedUniform, "bool", false);
+ }
+ }
  const faces = video.faces || [];
  this.setUniform("u_faceCount", "int", faces.length);
  if (faces.length > 0) {
@@ -2588,7 +2748,12 @@ ${error}`);
  updateParameterUniforms(parameterObjects) {
  for (const param of this.parameters) {
  const paramObj = parameterObjects.get(param.uniformName);
- if (!paramObj) continue;
+ if (!paramObj) {
+ if (Math.random() < 0.01) {
+ console.log(`[ShaderAdapter] Parameter '${param.uniformName}' not found in parameterObjects`);
+ }
+ continue;
+ }
  const value = paramObj.value;
  switch (param.type) {
  case "slider":
@@ -2620,6 +2785,9 @@ ${error}`);
  setUniform(name, type, value) {
  const location = this.uniformLocations.get(name);
  if (location === null || location === void 0) {
+ if (name.includes("[") && Math.random() < 0.01) {
+ console.log(`[ShaderAdapter] Uniform '${name}' not found (location is ${location})`);
+ }
  return;
  }
  const gl = this.gl;
@@ -2691,6 +2859,7 @@ ${error}`);
  }
  /**
  * Update video texture
+ * Supports both OffscreenCanvas and ImageBitmap
  */
  updateVideoTexture(videoFrame) {
  const gl = this.gl;
@@ -2700,6 +2869,10 @@ ${error}`);
  }
  gl.activeTexture(gl.TEXTURE0 + unit);
  gl.bindTexture(gl.TEXTURE_2D, this.videoTexture);
+ const shouldFlip = videoFrame instanceof OffscreenCanvas;
+ if (shouldFlip) {
+ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+ }
  gl.texImage2D(
  gl.TEXTURE_2D,
  0,
@@ -2708,6 +2881,9 @@ ${error}`);
  gl.UNSIGNED_BYTE,
  videoFrame
  );
+ if (shouldFlip) {
+ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+ }
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
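
The flip is applied only for OffscreenCanvas sources: directly injected ImageBitmaps arrive pre-flipped at capture (see the auto-capture path later in this diff), so their upload leaves UNPACK_FLIP_Y_WEBGL off. A capture-side sketch of that pre-flip:

    async function captureFlipped(canvas) {
      // Flip once at ImageBitmap creation; the texture upload then skips the flip.
      return createImageBitmap(canvas, { imageOrientation: "flipY" });
    }
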
@@ -2716,6 +2892,49 @@ ${error}`);
  if (location) {
  gl.uniform1i(location, unit);
  }
+ if (videoFrame instanceof ImageBitmap) {
+ videoFrame.close();
+ }
+ }
+ /**
+ * Update compositor stream texture at specified index
+ * Supports both OffscreenCanvas and ImageBitmap for zero-copy pipeline
+ */
+ updateStreamTexture(index, streamFrame) {
+ const gl = this.gl;
+ const uniformName = `u_stream${index}`;
+ const unit = this.textureUnits.get(uniformName);
+ if (!this.streamTextures[index]) {
+ this.streamTextures[index] = gl.createTexture();
+ }
+ gl.activeTexture(gl.TEXTURE0 + unit);
+ gl.bindTexture(gl.TEXTURE_2D, this.streamTextures[index]);
+ const shouldFlip = streamFrame instanceof OffscreenCanvas;
+ if (shouldFlip) {
+ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+ }
+ gl.texImage2D(
+ gl.TEXTURE_2D,
+ 0,
+ gl.RGBA,
+ gl.RGBA,
+ gl.UNSIGNED_BYTE,
+ streamFrame
+ );
+ if (shouldFlip) {
+ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+ }
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ const location = this.uniformLocations.get(uniformName);
+ if (location) {
+ gl.uniform1i(location, unit);
+ }
+ if (streamFrame instanceof ImageBitmap) {
+ streamFrame.close();
+ }
  }
  /**
  * Update segmentation mask texture
@@ -2800,6 +3019,32 @@ ${error}`);
  getParameterDefinitions() {
  return this.parameters;
  }
+ /**
+ * Cleanup resources
+ */
+ destroy() {
+ const gl = this.gl;
+ if (this.program) {
+ gl.deleteProgram(this.program);
+ this.program = null;
+ }
+ if (this.audioFFTTexture) gl.deleteTexture(this.audioFFTTexture);
+ if (this.videoTexture) gl.deleteTexture(this.videoTexture);
+ if (this.segmentationTexture) gl.deleteTexture(this.segmentationTexture);
+ for (const texture of this.streamTextures) {
+ if (texture) gl.deleteTexture(texture);
+ }
+ this.streamTextures = [];
+ for (const texture of this.textures.values()) {
+ if (texture) gl.deleteTexture(texture);
+ }
+ this.textures.clear();
+ if (this.backbufferFramebuffer) gl.deleteFramebuffer(this.backbufferFramebuffer);
+ if (this.backbufferTexture) gl.deleteTexture(this.backbufferTexture);
+ if (this.currentFramebuffer) gl.deleteFramebuffer(this.currentFramebuffer);
+ if (this.currentTexture) gl.deleteTexture(this.currentTexture);
+ if (this.quadBuffer) gl.deleteBuffer(this.quadBuffer);
+ }
  }
  class VijiWorkerRuntime {
  canvas = null;
@@ -2820,12 +3065,14 @@ class VijiWorkerRuntime {
  // Shader adapter for shader mode
  shaderAdapter = null;
  rendererType = "native";
+ // Pending capture requests (queue to handle multiple simultaneous requests)
+ pendingCaptures = [];
  /**
  * Enable or disable debug logging
  */
  setDebugMode(enabled) {
  this.debugMode = enabled;
- if (this.videoSystem) this.videoSystem.setDebugMode(enabled);
+ this.videoSystems.forEach((vs) => vs?.setDebugMode(enabled));
  if (this.parameterSystem && "setDebugMode" in this.parameterSystem) {
  this.parameterSystem.setDebugMode(enabled);
  }
@@ -2850,24 +3097,61 @@ class VijiWorkerRuntime {
  parameterSystem;
  // Interaction system (Phase 7)
  interactionSystem;
- // Video system (Phase 10) - worker-side video processing
- videoSystem;
+ // Video systems (multi-stream) - index 0 = main with CV, 1+ = additional without CV
+ videoSystems = [];
+ hasMainVideoStream = false;
+ // Track if videoSystems[0] is main stream
+ // Auto-capture for frame sources
+ autoCaptureEnabled = false;
+ autoCaptureFormat = { flipY: true };
+ // Default: flip for WebGL compatibility
  // Audio state (Phase 5) - receives analysis results from host
  audioState = {
  isConnected: false,
- volume: { rms: 0, peak: 0 },
+ volume: { current: 0, peak: 0, smoothed: 0 },
  bands: {
- bass: 0,
- mid: 0,
- treble: 0,
- subBass: 0,
+ low: 0,
  lowMid: 0,
+ mid: 0,
  highMid: 0,
- presence: 0,
- brilliance: 0
+ high: 0,
+ lowSmoothed: 0,
+ lowMidSmoothed: 0,
+ midSmoothed: 0,
+ highMidSmoothed: 0,
+ highSmoothed: 0
+ },
+ beat: {
+ kick: 0,
+ snare: 0,
+ hat: 0,
+ any: 0,
+ kickSmoothed: 0,
+ snareSmoothed: 0,
+ anySmoothed: 0,
+ triggers: { any: false, kick: false, snare: false, hat: false },
+ bpm: 120,
+ phase: 0,
+ bar: 0,
+ confidence: 0,
+ isLocked: false
+ },
+ spectral: {
+ brightness: 0,
+ flatness: 0,
+ flux: 0
  },
  frequencyData: new Uint8Array(0)
  };
+ // Device sensor state (internal device + external devices)
+ deviceState = {
+ device: {
+ motion: null,
+ orientation: null,
+ geolocation: null
+ },
+ devices: []
+ };
  // Video state is now managed by the worker-side VideoSystem
  // Artist API object
  viji = {
@@ -2885,6 +3169,7 @@ class VijiWorkerRuntime {
  fps: 60,
  // Audio API (Phase 5) - will be set in constructor
  audio: {},
+ // Main video stream (index 0, CV enabled)
  video: {
  isConnected: false,
  currentFrame: null,
@@ -2893,12 +3178,24 @@ class VijiWorkerRuntime {
  frameRate: 0,
  getFrameData: () => null,
  faces: [],
- hands: []
+ hands: [],
+ pose: null,
+ segmentation: null,
+ cv: {}
  },
+ // Additional video streams (index 1+, no CV)
+ streams: [],
  // Interaction APIs will be added during construction
  mouse: {},
  keyboard: {},
  touches: {},
+ // Device sensor APIs (internal device + external devices)
+ device: {
+ motion: null,
+ orientation: null,
+ geolocation: null
+ },
+ devices: [],
  // Parameter helper functions (return parameter objects) - delegate to parameter system
  slider: (defaultValue, config) => {
  return this.parameterSystem.createSliderParameter(defaultValue, config);
@@ -2947,9 +3244,7 @@ class VijiWorkerRuntime {
  this.postMessage(type, data);
  });
  this.interactionSystem = new InteractionSystem();
- this.videoSystem = new VideoSystem();
  Object.assign(this.viji, this.interactionSystem.getInteractionAPIs());
- Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
  this.viji.audio = {
  ...this.audioState,
  getFrequencyData: () => this.audioState.frequencyData
@@ -3055,16 +3350,38 @@ class VijiWorkerRuntime {
  this.interactionSystem.resetInteractionState();
  this.audioState = {
  isConnected: false,
- volume: { rms: 0, peak: 0 },
+ volume: { current: 0, peak: 0, smoothed: 0 },
  bands: {
- bass: 0,
- mid: 0,
- treble: 0,
- subBass: 0,
+ low: 0,
  lowMid: 0,
+ mid: 0,
  highMid: 0,
- presence: 0,
- brilliance: 0
+ high: 0,
+ lowSmoothed: 0,
+ lowMidSmoothed: 0,
+ midSmoothed: 0,
+ highMidSmoothed: 0,
+ highSmoothed: 0
+ },
+ beat: {
+ kick: 0,
+ snare: 0,
+ hat: 0,
+ any: 0,
+ kickSmoothed: 0,
+ snareSmoothed: 0,
+ anySmoothed: 0,
+ triggers: { any: false, kick: false, snare: false, hat: false },
+ bpm: 120,
+ phase: 0,
+ bar: 0,
+ confidence: 0,
+ isLocked: false
+ },
+ spectral: {
+ brightness: 0,
+ flatness: 0,
+ flux: 0
  },
  frequencyData: new Uint8Array(0)
  };
@@ -3072,8 +3389,37 @@ class VijiWorkerRuntime {
  ...this.audioState,
  getFrequencyData: () => this.audioState.frequencyData
  };
- this.videoSystem.resetVideoState();
- Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
+ this.videoSystems.forEach((vs) => vs.resetVideoState());
+ if (this.videoSystems[0]) {
+ Object.assign(this.viji.video, this.videoSystems[0].getVideoAPI());
+ }
+ this.updateVijiStreams();
+ }
+ /**
+ * Updates viji.streams from videoSystems array
+ * Slices based on whether there's a main stream at index 0
+ */
+ updateVijiStreams() {
+ const startIndex = this.hasMainVideoStream ? 1 : 0;
+ const freshStreams = this.videoSystems.slice(startIndex).map((vs) => {
+ const api = vs?.getVideoAPI() || {
+ isConnected: false,
+ currentFrame: null,
+ frameWidth: 0,
+ frameHeight: 0,
+ frameRate: 0,
+ getFrameData: () => null
+ };
+ return api;
+ });
+ this.viji.streams.length = freshStreams.length;
+ for (let i = 0; i < freshStreams.length; i++) {
+ if (this.viji.streams[i]) {
+ Object.assign(this.viji.streams[i], freshStreams[i]);
+ } else {
+ this.viji.streams[i] = freshStreams[i];
+ }
+ }
  }
  // Send all parameters (from helper functions) to host
  sendAllParametersToHost() {
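
For scene authors the result is a stable viji.streams array whose entries are refreshed in place each frame. A consumption sketch for a native-canvas scene (ctx as a 2D context is an assumption):

    function render(viji, ctx) {
      viji.streams.forEach((stream, i) => {
        if (stream.isConnected && stream.currentFrame) {
          ctx.drawImage(stream.currentFrame, i * 160, 0, 160, 90); // thumbnail row
        }
      });
    }
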
@@ -3125,6 +3471,21 @@ class VijiWorkerRuntime {
  case "video-config-update":
  this.handleVideoConfigUpdate(message);
  break;
+ case "video-streams-prepare":
+ this.handleVideoStreamsPrepare(message);
+ break;
+ case "video-frame-direct":
+ this.handleVideoFrameDirect(message);
+ break;
+ case "enable-auto-capture":
+ this.autoCaptureEnabled = message.data.enabled;
+ if (message.data.flipY !== void 0) {
+ this.autoCaptureFormat.flipY = message.data.flipY;
+ this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"} with flipY=${message.data.flipY}`);
+ } else {
+ this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"}`);
+ }
+ break;
  case "mouse-update":
  this.handleMouseUpdate(message);
  break;
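
Host-side, enabling auto-capture is a plain worker message. Only the message shape below is taken from the handler above; the surrounding code is assumed:

    worker.postMessage({
      type: "enable-auto-capture",
      data: { enabled: true, flipY: false } // omit flipY to keep the default (true)
    });
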
@@ -3137,6 +3498,9 @@ class VijiWorkerRuntime {
  case "interaction-enabled":
  this.handleInteractionEnabled(message);
  break;
+ case "device-state-update":
+ this.handleDeviceStateUpdate(message);
+ break;
  case "performance-update":
  this.handlePerformanceUpdate(message);
  break;
@@ -3183,8 +3547,8 @@ class VijiWorkerRuntime {
  handleCVFrameRateUpdate(message) {
  if (message.data && message.data.mode) {
  const sceneProcessingFPS = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
- if (this.videoSystem) {
- this.videoSystem.handleVideoConfigUpdate({
+ if (this.videoSystems[0]) {
+ this.videoSystems[0].handleVideoConfigUpdate({
  cvFrameRate: {
  mode: message.data.mode,
  sceneTargetFPS: sceneProcessingFPS
@@ -3207,7 +3571,12 @@ class VijiWorkerRuntime {
  const totalTime = this.effectiveFrameTimes[this.effectiveFrameTimes.length - 1] - this.effectiveFrameTimes[0];
  const frameCount = this.effectiveFrameTimes.length - 1;
  const effectiveRefreshRate = Math.round(frameCount / totalTime * 1e3);
- const cvStats = this.videoSystem.getCVStats();
+ const cvStats = this.videoSystems[0]?.getCVStats() || {
+ activeFeatures: [],
+ processingTime: 0,
+ actualFPS: 0,
+ isProcessing: false
+ };
  this.postMessage("performance-update", {
  effectiveRefreshRate,
  frameRateMode: this.frameRateMode,
@@ -3265,10 +3634,23 @@ class VijiWorkerRuntime {
  this.debugLog("Stream update:", message.data);
  }
  handleAudioAnalysisUpdate(message) {
+ const events = message.data.beat.events || [];
+ const triggers = {
+ kick: events.some((e) => e.type === "kick"),
+ snare: events.some((e) => e.type === "snare"),
+ hat: events.some((e) => e.type === "hat"),
+ any: events.length > 0
+ };
  this.audioState = {
  isConnected: message.data.isConnected,
  volume: message.data.volume,
  bands: message.data.bands,
+ beat: {
+ ...message.data.beat,
+ triggers
+ // Add derived triggers (reliable!)
+ },
+ spectral: message.data.spectral,
  frequencyData: new Uint8Array(message.data.frequencyData)
  };
  this.viji.audio = {
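
Because the triggers are derived from the beat event list on every update, scenes can poll them without missing one-frame events. A usage sketch (the 2D-context render signature is an assumption):

    function render(viji, ctx) {
      const beat = viji.audio.beat;
      // triggers.* are true only for updates that carried a matching event
      const radius = 40 + (beat.triggers.kick ? 30 : 0) + 20 * beat.anySmoothed;
      ctx.beginPath();
      ctx.arc(100, 100, radius, 0, Math.PI * 2);
      ctx.fill();
    }
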
@@ -3277,44 +3659,113 @@ class VijiWorkerRuntime {
  };
  }
  handleVideoCanvasSetup(message) {
- this.videoSystem.handleCanvasSetup({
+ const { streamIndex, isMain } = message.data;
+ const index = streamIndex || 0;
+ const videoSystem = new VideoSystem();
+ videoSystem.setDebugMode(this.debugMode);
+ videoSystem.handleCanvasSetup({
  offscreenCanvas: message.data.offscreenCanvas,
  width: message.data.width,
  height: message.data.height,
  timestamp: message.data.timestamp
  });
- Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
+ this.videoSystems[index] = videoSystem;
+ this.debugLog(`Video system setup at index ${index}, isMain: ${isMain}`);
+ if (index === 0) {
+ Object.assign(this.viji.video, videoSystem.getVideoAPI());
+ } else {
+ this.updateVijiStreams();
+ }
  }
  handleVideoFrameUpdate(message) {
- this.videoSystem.handleFrameUpdate({
- imageBitmap: message.data.imageBitmap,
- timestamp: message.data.timestamp
- });
- Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
+ const index = message.data.streamIndex || 0;
+ if (this.videoSystems[index]) {
+ this.videoSystems[index].handleFrameUpdate({
+ imageBitmap: message.data.imageBitmap,
+ timestamp: message.data.timestamp
+ });
+ if (index === 0) {
+ Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+ }
+ }
  }
  handleVideoConfigUpdate(message) {
- this.videoSystem.handleVideoConfigUpdate({
- ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
- ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
- ...message.data.width && { width: message.data.width },
- ...message.data.height && { height: message.data.height },
- ...message.data.disconnect && { disconnect: message.data.disconnect },
- timestamp: message.data.timestamp
- });
- Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
+ const index = message.data.streamIndex || 0;
+ if (this.videoSystems[index]) {
+ this.videoSystems[index].handleVideoConfigUpdate({
+ ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
+ ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
+ ...message.data.width && { width: message.data.width },
+ ...message.data.height && { height: message.data.height },
+ ...message.data.disconnect && { disconnect: message.data.disconnect },
+ timestamp: message.data.timestamp
+ });
+ if (index === 0) {
+ Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+ }
+ }
+ }
+ handleVideoStreamsPrepare(message) {
+ const { mainStream, mediaStreamCount, directFrameCount } = message.data;
+ const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
+ this.hasMainVideoStream = mainStream;
+ this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
+ while (this.videoSystems.length < totalStreams) {
+ this.videoSystems.push(new VideoSystem());
+ }
+ const directFrameStartIndex = (mainStream ? 1 : 0) + mediaStreamCount;
+ for (let i = 0; i < directFrameCount; i++) {
+ const index = directFrameStartIndex + i;
+ if (!this.videoSystems[index]) {
+ this.videoSystems[index] = new VideoSystem();
+ }
+ this.videoSystems[index].setDebugMode(this.debugMode);
+ this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+ }
+ this.updateVijiStreams();
+ this.debugLog(`Prepared ${totalStreams} video systems (${directFrameCount} direct frames)`);
+ }
+ handleVideoFrameDirect(message) {
+ const index = message.data.streamIndex || 0;
+ if (!this.videoSystems[index]) {
+ this.debugLog(`[Compositor] Creating new VideoSystem at index ${index} for direct frames`);
+ this.videoSystems[index] = new VideoSystem();
+ this.videoSystems[index].setDebugMode(this.debugMode);
+ this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+ }
+ this.videoSystems[index].handleDirectFrame(message.data);
  }
  handlePerformanceUpdate(message) {
  this.debugLog("Performance update:", message.data);
  }
  /**
  * Handle capture-frame request from host.
- * Produces an ArrayBuffer (image bytes) to send back as transferable.
+ * Defers capture to immediately after the next render completes to avoid race conditions.
  */
  async handleCaptureFrame(message) {
+ this.pendingCaptures.push(message);
+ this.debugLog(`Capture queued: ${message.data.format || "blob"} (${this.pendingCaptures.length} pending)`);
+ }
+ /**
+ * Execute a capture frame request immediately after render completes.
+ * This ensures we capture a fully rendered frame, avoiding race conditions.
+ */
+ async executeCaptureFrame(message) {
  try {
  if (!this.canvas) {
  throw new Error("Canvas not initialized");
  }
+ const format = message.data.format || "blob";
+ if (format === "bitmap") {
+ const bitmap = this.canvas.transferToImageBitmap();
+ self.postMessage({
+ type: "capture-frame-result",
+ id: message.id,
+ timestamp: Date.now(),
+ data: { bitmap }
+ }, [bitmap]);
+ return;
+ }
  const mimeType = message.data.type || "image/jpeg";
  const srcWidth = this.canvas.width;
  const srcHeight = this.canvas.height;
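
A host-side round-trip for the new bitmap format might look like this (the request type "capture-frame" is inferred from the handler name; "capture-frame-result" is verbatim from the code above):

    const id = crypto.randomUUID();
    worker.addEventListener("message", (e) => {
      if (e.data.type === "capture-frame-result" && e.data.id === id) {
        const bitmap = e.data.data.bitmap; // transferred zero-copy, not serialized
      }
    });
    worker.postMessage({ type: "capture-frame", id, data: { format: "bitmap" } });
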
@@ -3361,10 +3812,10 @@ class VijiWorkerRuntime {
  if (!sourceCtx) throw new Error("Failed to create source context");
  const imageData = new ImageData(flippedPixels, srcWidth, srcHeight);
  sourceCtx.putImageData(imageData, 0, 0);
- this.debugLog("Captured frame from WebGL using readPixels");
+ this.debugLog("Captured frame from WebGL using readPixels (post-render)");
  } else {
  sourceCanvas = this.canvas;
- this.debugLog("Captured frame from 2D canvas");
+ this.debugLog("Captured frame from 2D canvas (post-render)");
  }
  const temp = new OffscreenCanvas(targetWidth, targetHeight);
  const tctx = temp.getContext("2d");
@@ -3406,6 +3857,7 @@ class VijiWorkerRuntime {
  if (!this.isRunning) return;
  const currentTime = performance.now();
  this.interactionSystem.frameStart();
+ this.updateVijiStreams();
  this.viji.fps = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
  let shouldRender = true;
  if (this.frameRateMode === "half") {
@@ -3439,6 +3891,38 @@ class VijiWorkerRuntime {
  stack: error.stack
  });
  }
+ if (this.pendingCaptures.length > 0) {
+ const captures = [...this.pendingCaptures];
+ this.pendingCaptures = [];
+ for (const captureMsg of captures) {
+ this.executeCaptureFrame(captureMsg).catch((error) => {
+ console.error("Capture execution error:", error);
+ });
+ }
+ }
+ if (this.autoCaptureEnabled && this.canvas) {
+ try {
+ const ctx = this.canvas.getContext("2d") || this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
+ if (ctx) {
+ const options = this.autoCaptureFormat.flipY ? { imageOrientation: "flipY" } : {};
+ createImageBitmap(this.canvas, options).then((bitmap) => {
+ self.postMessage({
+ type: "auto-capture-result",
+ timestamp: Date.now(),
+ data: { bitmap, timestamp: performance.now() }
+ }, [bitmap]);
+ }).catch((err) => {
+ console.warn("[AutoCapture] ImageBitmap creation failed:", err);
+ });
+ } else {
+ if (this.debugMode && this.frameCount % 60 === 0) {
+ this.debugLog("[AutoCapture] No context yet, skipping capture");
+ }
+ }
+ } catch (error) {
+ console.warn("[AutoCapture] Failed:", error);
+ }
+ }
  }
  this.reportPerformanceStats(currentTime);
  requestAnimationFrame(() => this.renderFrame());
@@ -3464,6 +3948,11 @@ class VijiWorkerRuntime {
  handleInteractionEnabled(message) {
  this.interactionSystem.setInteractionEnabled(message.data.enabled);
  }
+ handleDeviceStateUpdate(message) {
+ this.deviceState = message.data;
+ this.viji.device = this.deviceState.device;
+ this.viji.devices = this.deviceState.devices;
+ }
  }
  class SceneAnalyzer {
  /**
@@ -3508,8 +3997,10 @@ async function setSceneCode(sceneCode) {
  runtime.sendAllParametersToHost();
  } else {
  const functionBody = sceneCode + '\nif (typeof render === "function") {\n return render;\n}\nthrow new Error("Scene code must define a render function");';
- const sceneFunction = new Function("viji", functionBody);
- renderFunction = sceneFunction(runtime.viji);
+ const AsyncFunction = Object.getPrototypeOf(async function() {
+ }).constructor;
+ const sceneFunction = new AsyncFunction("viji", functionBody);
+ renderFunction = await sceneFunction(runtime.viji);
  self.renderFunction = renderFunction;
  runtime.sendAllParametersToHost();
  }
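
Wrapping scene code in an AsyncFunction means scenes can now use top-level await before their render function is returned. A sketch of scene source taking advantage of that (loadImageBitmap is a hypothetical helper, and the render signature is an assumption):

    const sceneCode = `
      const texture = await loadImageBitmap("/assets/grain.png"); // hypothetical helper
      function render(viji, ctx) {
        ctx.drawImage(texture, 0, 0);
      }
    `;
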
@@ -3527,4 +4018,4 @@ async function setSceneCode(sceneCode) {
  }
  }
  self.setSceneCode = setSceneCode;
- //# sourceMappingURL=viji.worker-C8mrsLDG.js.map
+ //# sourceMappingURL=viji.worker-b3XR7zKX.js.map