@viji-dev/core 0.2.20 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +153 -32
- package/dist/artist-dts-p5.js +1 -1
- package/dist/artist-dts.js +1 -1
- package/dist/artist-global.d.ts +114 -13
- package/dist/artist-js-ambient.d.ts +43 -1
- package/dist/artist-jsdoc.d.ts +43 -1
- package/dist/assets/{viji.worker-BANvuSYW.js → viji.worker-b3XR7zKX.js} +525 -53
- package/dist/assets/viji.worker-b3XR7zKX.js.map +1 -0
- package/dist/assets/wasm/essentia-wasm.web.wasm +0 -0
- package/dist/essentia-wasm.web-CO8uMw0d.js +5696 -0
- package/dist/essentia-wasm.web-CO8uMw0d.js.map +1 -0
- package/dist/essentia.js-core.es-DnrJE0uR.js +3174 -0
- package/dist/essentia.js-core.es-DnrJE0uR.js.map +1 -0
- package/dist/index-BdLMCFEN.js +16180 -0
- package/dist/index-BdLMCFEN.js.map +1 -0
- package/dist/index.d.ts +1128 -36
- package/dist/index.js +5 -2613
- package/dist/index.js.map +1 -1
- package/dist/shader-uniforms.js +125 -0
- package/package.json +5 -2
- package/dist/assets/viji.worker-BANvuSYW.js.map +0 -1
@@ -1241,6 +1241,8 @@ class VideoSystem {
   cvScratchContext = null;
   // Debug logging control
   debugMode = false;
+  // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
+  directGPUMode = false;
   /**
    * Enable or disable debug logging
    */
@@ -1286,9 +1288,13 @@ class VideoSystem {
    */
   getVideoAPI() {
     const cvResults = this.cvSystem.getResults();
+    const currentFrame = this.videoState.currentFrame;
+    if (this.directGPUMode && currentFrame instanceof ImageBitmap) {
+      this.videoState.currentFrame = null;
+    }
     return {
       isConnected: this.videoState.isConnected,
-      currentFrame
+      currentFrame,
       frameWidth: this.videoState.frameWidth,
       frameHeight: this.videoState.frameHeight,
       frameRate: this.videoState.frameRate,
@@ -1526,6 +1532,54 @@ class VideoSystem {
   getCVStats() {
     return this.cvSystem.getStats();
   }
+  /**
+   * Initialize for direct frame injection (no MediaStream)
+   * Enables GPU-only mode for zero-copy pipeline (ImageBitmaps are pre-flipped at capture)
+   */
+  initializeForDirectFrames(consumerRendererType) {
+    this.disconnectVideo();
+    this.directGPUMode = consumerRendererType === "shader";
+    if (this.directGPUMode) {
+      this.debugLog("VideoSystem initialized in direct GPU mode (zero-copy, pre-flipped ImageBitmaps)");
+    } else {
+      this.offscreenCanvas = new OffscreenCanvas(1920, 1080);
+      this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+      if (!this.ctx) {
+        throw new Error("Failed to get 2D context for direct frames");
+      }
+      this.videoState.currentFrame = this.offscreenCanvas;
+      this.debugLog("VideoSystem initialized with canvas (P5/Canvas consumer)");
+    }
+    this.videoState.isConnected = false;
+  }
+  /**
+   * Handle directly injected frame (zero-copy)
+   */
+  handleDirectFrame(data) {
+    if (this.directGPUMode) {
+      this.videoState.currentFrame = data.imageBitmap;
+      this.videoState.frameWidth = data.imageBitmap.width;
+      this.videoState.frameHeight = data.imageBitmap.height;
+      this.videoState.isConnected = true;
+    } else {
+      if (!this.offscreenCanvas || !this.ctx) {
+        this.debugLog("[Compositor] Initializing canvas for direct frames");
+        this.offscreenCanvas = new OffscreenCanvas(data.imageBitmap.width, data.imageBitmap.height);
+        this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+      }
+      if (this.offscreenCanvas.width !== data.imageBitmap.width || this.offscreenCanvas.height !== data.imageBitmap.height) {
+        this.offscreenCanvas.width = data.imageBitmap.width;
+        this.offscreenCanvas.height = data.imageBitmap.height;
+      }
+      this.ctx.drawImage(data.imageBitmap, 0, 0);
+      this.videoState.currentFrame = this.offscreenCanvas;
+      this.videoState.frameWidth = data.imageBitmap.width;
+      this.videoState.frameHeight = data.imageBitmap.height;
+      this.videoState.isConnected = true;
+      this.processCurrentFrame(data.timestamp);
+      data.imageBitmap.close();
+    }
+  }
 }
 class P5WorkerAdapter {
   constructor(offscreenCanvas, _vijiAPI, sceneCode) {
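
The two methods above define the worker-side contract for direct frame injection. A minimal host-side sketch of feeding one frame into an additional stream: the `video-frame-direct` message type and the `streamIndex`/`imageBitmap`/`timestamp` fields appear elsewhere in this diff (see `handleVideoFrameDirect` below), while the `{ type, data }` envelope and the `worker`/`sourceCanvas` variables are assumptions.

```js
// Hypothetical host-side producer; transferring (not copying) the bitmap
// is what keeps the shader path zero-copy.
const bitmap = await createImageBitmap(sourceCanvas, { imageOrientation: "flipY" }); // pre-flip for WebGL
worker.postMessage(
  { type: "video-frame-direct", data: { streamIndex: 1, imageBitmap: bitmap, timestamp: performance.now() } },
  [bitmap] // transfer list: ownership moves to the worker
);
```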
@@ -1770,6 +1824,49 @@ class P5WorkerAdapter {
       return null;
     }
   }
+  /**
+   * Wrap video frames in P5.js-compatible format
+   * P5.js expects images to have {canvas, elt, width, height} structure
+   * This wrapping is done per-frame for P5 scenes only, keeping the artist API unchanged
+   */
+  wrapVideoFramesForP5(vijiAPI) {
+    if (vijiAPI.video?.currentFrame instanceof OffscreenCanvas) {
+      const canvas = vijiAPI.video.currentFrame;
+      vijiAPI.video.currentFrame = {
+        canvas,
+        elt: canvas,
+        width: canvas.width,
+        height: canvas.height
+      };
+    } else if (vijiAPI.video?.currentFrame instanceof ImageBitmap) {
+      const bitmap = vijiAPI.video.currentFrame;
+      vijiAPI.video.currentFrame = {
+        elt: bitmap,
+        width: bitmap.width,
+        height: bitmap.height
+      };
+    }
+    if (Array.isArray(vijiAPI.streams)) {
+      for (const stream of vijiAPI.streams) {
+        if (stream?.currentFrame instanceof OffscreenCanvas) {
+          const canvas = stream.currentFrame;
+          stream.currentFrame = {
+            canvas,
+            elt: canvas,
+            width: canvas.width,
+            height: canvas.height
+          };
+        } else if (stream?.currentFrame instanceof ImageBitmap) {
+          const bitmap = stream.currentFrame;
+          stream.currentFrame = {
+            elt: bitmap,
+            width: bitmap.width,
+            height: bitmap.height
+          };
+        }
+      }
+    }
+  }
   /**
    * Add .p5 property to image parameters for P5.js-specific rendering
    * This allows artists to use p5.image() while keeping .value for native canvas API
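
The wrapper exists so `p5.image()` accepts worker video frames without changing the artist-facing API. A minimal scene sketch, assuming the `(vijiAPI, p5Instance)` call shape that `P5WorkerAdapter` uses above; the scene body itself is illustrative.

```js
// Illustrative P5 scene; viji.video.currentFrame is the wrapped
// { canvas/elt, width, height } object produced by wrapVideoFramesForP5.
function render(viji, p5) {
  p5.background(0);
  if (viji.video.isConnected && viji.video.currentFrame) {
    p5.image(viji.video.currentFrame, 0, 0, p5.width, p5.height);
  }
}
```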
@@ -1810,6 +1907,7 @@ class P5WorkerAdapter {
     }
     try {
       this.addP5PropertyToImageParameters(parameterObjects);
+      this.wrapVideoFramesForP5(vijiAPI);
       if (!this.artistSetupComplete && this.setupFn) {
         this.setupFn(vijiAPI, this.p5Instance);
         this.artistSetupComplete = true;
@@ -2032,6 +2130,8 @@ class ShaderWorkerAdapter {
       this.gl = gl;
     }
   }
+  static MAX_STREAMS = 8;
+  // Maximum number of compositor input streams
   gl;
   program = null;
   uniformLocations = /* @__PURE__ */ new Map();
@@ -2048,6 +2148,8 @@ class ShaderWorkerAdapter {
   audioFFTTexture = null;
   videoTexture = null;
   segmentationTexture = null;
+  // Multi-stream textures
+  streamTextures = [];
   // Backbuffer support (ping-pong framebuffers)
   backbufferFramebuffer = null;
   backbufferTexture = null;
@@ -2271,6 +2373,33 @@ uniform sampler2D u_video; // Current video frame as texture
 uniform vec2 u_videoResolution; // Video frame width and height in pixels
 uniform float u_videoFrameRate; // Video frame rate in frames per second

+// Multi-Stream Compositor Support (using individual uniforms due to WebGL 1.0 limitations)
+uniform int u_streamCount; // Number of available compositor input streams (0-8)
+uniform sampler2D u_stream0; // Stream 0 texture
+uniform sampler2D u_stream1; // Stream 1 texture
+uniform sampler2D u_stream2; // Stream 2 texture
+uniform sampler2D u_stream3; // Stream 3 texture
+uniform sampler2D u_stream4; // Stream 4 texture
+uniform sampler2D u_stream5; // Stream 5 texture
+uniform sampler2D u_stream6; // Stream 6 texture
+uniform sampler2D u_stream7; // Stream 7 texture
+uniform vec2 u_stream0Resolution; // Stream 0 resolution
+uniform vec2 u_stream1Resolution; // Stream 1 resolution
+uniform vec2 u_stream2Resolution; // Stream 2 resolution
+uniform vec2 u_stream3Resolution; // Stream 3 resolution
+uniform vec2 u_stream4Resolution; // Stream 4 resolution
+uniform vec2 u_stream5Resolution; // Stream 5 resolution
+uniform vec2 u_stream6Resolution; // Stream 6 resolution
+uniform vec2 u_stream7Resolution; // Stream 7 resolution
+uniform bool u_stream0Connected; // Stream 0 connection status
+uniform bool u_stream1Connected; // Stream 1 connection status
+uniform bool u_stream2Connected; // Stream 2 connection status
+uniform bool u_stream3Connected; // Stream 3 connection status
+uniform bool u_stream4Connected; // Stream 4 connection status
+uniform bool u_stream5Connected; // Stream 5 connection status
+uniform bool u_stream6Connected; // Stream 6 connection status
+uniform bool u_stream7Connected; // Stream 7 connection status
+
 // CV - Face Detection
 uniform int u_faceCount; // Number of detected faces (0-1)
 uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
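
Because WebGL 1.0 forbids dynamically indexed sampler arrays, each stream gets its own numbered uniform. A sketch of a compositor fragment shader using them, written as a JS template string the way this bundle embeds shader source; the `u_stream*` names come from the block above, while `u_resolution` and the blend itself are illustrative assumptions.

```js
const compositeFragmentSource = `
precision mediump float;
uniform vec2 u_resolution;       // assumed canvas-size uniform
uniform sampler2D u_stream0;
uniform sampler2D u_stream1;
uniform bool u_stream0Connected;
uniform bool u_stream1Connected;
void main() {
  vec2 uv = gl_FragCoord.xy / u_resolution;
  vec4 a = u_stream0Connected ? texture2D(u_stream0, uv) : vec4(0.0);
  vec4 b = u_stream1Connected ? texture2D(u_stream1, uv) : vec4(0.0);
  gl_FragColor = mix(a, b, 0.5); // 50/50 crossfade of the first two inputs
}`;
```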
@@ -2364,13 +2493,22 @@ ${error}`);
     if (!this.program) return;
     const gl = this.gl;
     const numUniforms = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
+    const streamUniforms = [];
     for (let i = 0; i < numUniforms; i++) {
       const info = gl.getActiveUniform(this.program, i);
       if (info) {
         const location = gl.getUniformLocation(this.program, info.name);
         this.uniformLocations.set(info.name, location);
+        if (info.name.includes("stream") || info.name.includes("u_stream")) {
+          streamUniforms.push(info.name);
+        }
       }
     }
+    if (streamUniforms.length > 0) {
+      console.log("[ShaderAdapter] Stream-related uniforms found:", streamUniforms);
+    } else {
+      console.log("[ShaderAdapter] NO stream-related uniforms found in shader!");
+    }
   }
   /**
    * Reserve texture units for special textures
@@ -2379,6 +2517,9 @@ ${error}`);
     this.textureUnits.set("u_audioFFT", this.nextTextureUnit++);
     this.textureUnits.set("u_video", this.nextTextureUnit++);
     this.textureUnits.set("u_segmentationMask", this.nextTextureUnit++);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+      this.textureUnits.set(`u_stream${i}`, this.nextTextureUnit++);
+    }
     if (this.backbufferEnabled) {
       this.textureUnits.set("backbuffer", this.nextTextureUnit++);
     }
@@ -2490,16 +2631,16 @@ ${error}`);
       }
     }
     const audio = viji.audio;
-    this.setUniform("u_audioVolume", "float", audio.volume?.
+    this.setUniform("u_audioVolume", "float", audio.volume?.current || 0);
     this.setUniform("u_audioPeak", "float", audio.volume?.peak || 0);
-    this.setUniform("u_audioBass", "float", audio.bands?.
+    this.setUniform("u_audioBass", "float", audio.bands?.low || 0);
     this.setUniform("u_audioMid", "float", audio.bands?.mid || 0);
-    this.setUniform("u_audioTreble", "float", audio.bands?.
-    this.setUniform("u_audioSubBass", "float", audio.bands?.
+    this.setUniform("u_audioTreble", "float", audio.bands?.high || 0);
+    this.setUniform("u_audioSubBass", "float", audio.bands?.low || 0);
     this.setUniform("u_audioLowMid", "float", audio.bands?.lowMid || 0);
     this.setUniform("u_audioHighMid", "float", audio.bands?.highMid || 0);
-    this.setUniform("u_audioPresence", "float", audio.bands?.
-    this.setUniform("u_audioBrilliance", "float", audio.bands?.
+    this.setUniform("u_audioPresence", "float", audio.bands?.highMid || 0);
+    this.setUniform("u_audioBrilliance", "float", audio.bands?.high || 0);
     if (audio.isConnected) {
       this.updateAudioFFTTexture(audio.getFrequencyData());
     }
@@ -2512,6 +2653,25 @@ ${error}`);
       this.setUniform("u_videoResolution", "vec2", [0, 0]);
       this.setUniform("u_videoFrameRate", "float", 0);
     }
+    const streams = viji.streams || [];
+    const streamCount = Math.min(streams.length, ShaderWorkerAdapter.MAX_STREAMS);
+    this.setUniform("u_streamCount", "int", streamCount);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+      const connectedUniform = `u_stream${i}Connected`;
+      const resolutionUniform = `u_stream${i}Resolution`;
+      if (i < streamCount && streams[i]?.isConnected && streams[i]?.currentFrame) {
+        this.updateStreamTexture(i, streams[i].currentFrame);
+        this.setUniform(
+          resolutionUniform,
+          "vec2",
+          [streams[i].frameWidth, streams[i].frameHeight]
+        );
+        this.setUniform(connectedUniform, "bool", true);
+      } else {
+        this.setUniform(resolutionUniform, "vec2", [0, 0]);
+        this.setUniform(connectedUniform, "bool", false);
+      }
+    }
     const faces = video.faces || [];
     this.setUniform("u_faceCount", "int", faces.length);
     if (faces.length > 0) {
@@ -2588,7 +2748,12 @@ ${error}`);
   updateParameterUniforms(parameterObjects) {
     for (const param of this.parameters) {
       const paramObj = parameterObjects.get(param.uniformName);
-      if (!paramObj)
+      if (!paramObj) {
+        if (Math.random() < 0.01) {
+          console.log(`[ShaderAdapter] Parameter '${param.uniformName}' not found in parameterObjects`);
+        }
+        continue;
+      }
       const value = paramObj.value;
       switch (param.type) {
         case "slider":
@@ -2620,6 +2785,9 @@ ${error}`);
   setUniform(name, type, value) {
     const location = this.uniformLocations.get(name);
     if (location === null || location === void 0) {
+      if (name.includes("[") && Math.random() < 0.01) {
+        console.log(`[ShaderAdapter] Uniform '${name}' not found (location is ${location})`);
+      }
       return;
     }
     const gl = this.gl;
@@ -2691,6 +2859,7 @@ ${error}`);
   }
   /**
    * Update video texture
+   * Supports both OffscreenCanvas and ImageBitmap
    */
   updateVideoTexture(videoFrame) {
     const gl = this.gl;
@@ -2700,6 +2869,10 @@ ${error}`);
     }
     gl.activeTexture(gl.TEXTURE0 + unit);
     gl.bindTexture(gl.TEXTURE_2D, this.videoTexture);
+    const shouldFlip = videoFrame instanceof OffscreenCanvas;
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
     gl.texImage2D(
       gl.TEXTURE_2D,
       0,
@@ -2708,6 +2881,9 @@ ${error}`);
       gl.UNSIGNED_BYTE,
       videoFrame
     );
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+    }
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
@@ -2716,6 +2892,49 @@ ${error}`);
     if (location) {
       gl.uniform1i(location, unit);
     }
+    if (videoFrame instanceof ImageBitmap) {
+      videoFrame.close();
+    }
+  }
+  /**
+   * Update compositor stream texture at specified index
+   * Supports both OffscreenCanvas and ImageBitmap for zero-copy pipeline
+   */
+  updateStreamTexture(index, streamFrame) {
+    const gl = this.gl;
+    const uniformName = `u_stream${index}`;
+    const unit = this.textureUnits.get(uniformName);
+    if (!this.streamTextures[index]) {
+      this.streamTextures[index] = gl.createTexture();
+    }
+    gl.activeTexture(gl.TEXTURE0 + unit);
+    gl.bindTexture(gl.TEXTURE_2D, this.streamTextures[index]);
+    const shouldFlip = streamFrame instanceof OffscreenCanvas;
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
+    gl.texImage2D(
+      gl.TEXTURE_2D,
+      0,
+      gl.RGBA,
+      gl.RGBA,
+      gl.UNSIGNED_BYTE,
+      streamFrame
+    );
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+    }
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+    const location = this.uniformLocations.get(uniformName);
+    if (location) {
+      gl.uniform1i(location, unit);
+    }
+    if (streamFrame instanceof ImageBitmap) {
+      streamFrame.close();
+    }
   }
   /**
    * Update segmentation mask texture
@@ -2800,6 +3019,32 @@ ${error}`);
   getParameterDefinitions() {
     return this.parameters;
   }
+  /**
+   * Cleanup resources
+   */
+  destroy() {
+    const gl = this.gl;
+    if (this.program) {
+      gl.deleteProgram(this.program);
+      this.program = null;
+    }
+    if (this.audioFFTTexture) gl.deleteTexture(this.audioFFTTexture);
+    if (this.videoTexture) gl.deleteTexture(this.videoTexture);
+    if (this.segmentationTexture) gl.deleteTexture(this.segmentationTexture);
+    for (const texture of this.streamTextures) {
+      if (texture) gl.deleteTexture(texture);
+    }
+    this.streamTextures = [];
+    for (const texture of this.textures.values()) {
+      if (texture) gl.deleteTexture(texture);
+    }
+    this.textures.clear();
+    if (this.backbufferFramebuffer) gl.deleteFramebuffer(this.backbufferFramebuffer);
+    if (this.backbufferTexture) gl.deleteTexture(this.backbufferTexture);
+    if (this.currentFramebuffer) gl.deleteFramebuffer(this.currentFramebuffer);
+    if (this.currentTexture) gl.deleteTexture(this.currentTexture);
+    if (this.quadBuffer) gl.deleteBuffer(this.quadBuffer);
+  }
 }
 class VijiWorkerRuntime {
   canvas = null;
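
The new `destroy()` lets a host release GPU resources on scene swap instead of waiting for context loss. A hedged teardown sketch; `adapter` is a stand-in for however the runtime holds its `ShaderWorkerAdapter`.

```js
// Hypothetical teardown path on scene swap.
if (adapter instanceof ShaderWorkerAdapter) {
  adapter.destroy(); // frees program, video/FFT/stream textures, framebuffers, quad buffer
  adapter = null;    // drop the reference afterwards
}
```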
@@ -2827,7 +3072,7 @@ class VijiWorkerRuntime {
    */
   setDebugMode(enabled) {
     this.debugMode = enabled;
-
+    this.videoSystems.forEach((vs) => vs?.setDebugMode(enabled));
     if (this.parameterSystem && "setDebugMode" in this.parameterSystem) {
       this.parameterSystem.setDebugMode(enabled);
     }
@@ -2852,24 +3097,61 @@ class VijiWorkerRuntime {
   parameterSystem;
   // Interaction system (Phase 7)
   interactionSystem;
-  // Video
-
+  // Video systems (multi-stream) - index 0 = main with CV, 1+ = additional without CV
+  videoSystems = [];
+  hasMainVideoStream = false;
+  // Track if videoSystems[0] is main stream
+  // Auto-capture for frame sources
+  autoCaptureEnabled = false;
+  autoCaptureFormat = { flipY: true };
+  // Default: flip for WebGL compatibility
   // Audio state (Phase 5) - receives analysis results from host
   audioState = {
     isConnected: false,
-    volume: {
+    volume: { current: 0, peak: 0, smoothed: 0 },
     bands: {
-
-      mid: 0,
-      treble: 0,
-      subBass: 0,
+      low: 0,
       lowMid: 0,
+      mid: 0,
       highMid: 0,
-
-
+      high: 0,
+      lowSmoothed: 0,
+      lowMidSmoothed: 0,
+      midSmoothed: 0,
+      highMidSmoothed: 0,
+      highSmoothed: 0
+    },
+    beat: {
+      kick: 0,
+      snare: 0,
+      hat: 0,
+      any: 0,
+      kickSmoothed: 0,
+      snareSmoothed: 0,
+      anySmoothed: 0,
+      triggers: { any: false, kick: false, snare: false, hat: false },
+      bpm: 120,
+      phase: 0,
+      bar: 0,
+      confidence: 0,
+      isLocked: false
+    },
+    spectral: {
+      brightness: 0,
+      flatness: 0,
+      flux: 0
     },
     frequencyData: new Uint8Array(0)
   };
+  // Device sensor state (internal device + external devices)
+  deviceState = {
+    device: {
+      motion: null,
+      orientation: null,
+      geolocation: null
+    },
+    devices: []
+  };
   // Video state is now managed by the worker-side VideoSystem
   // Artist API object
   viji = {
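
The audio state now carries beat and spectral analysis alongside renamed band fields (`low`/`high` replacing the old `treble`/`subBass`-style names). A sketch of scene code reading it; the field names mirror the defaults above, while the `(viji, ctx)` render signature and the drawing itself are illustrative assumptions.

```js
function render(viji, ctx) {
  const { bands, beat, spectral } = viji.audio;
  if (beat.triggers.kick) {
    ctx.fillStyle = "white"; // one-frame flash on a detected kick
    ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
  }
  const radius = 40 + bands.lowSmoothed * 200;       // bass-driven size
  const hue = Math.floor(spectral.brightness * 360); // spectral brightness -> hue
  ctx.fillStyle = `hsl(${hue}, 80%, 60%)`;
  ctx.beginPath();
  ctx.arc(ctx.canvas.width / 2, ctx.canvas.height / 2, radius, 0, Math.PI * 2);
  ctx.fill();
}
```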
@@ -2887,6 +3169,7 @@ class VijiWorkerRuntime {
     fps: 60,
     // Audio API (Phase 5) - will be set in constructor
     audio: {},
+    // Main video stream (index 0, CV enabled)
     video: {
       isConnected: false,
       currentFrame: null,
@@ -2895,12 +3178,24 @@ class VijiWorkerRuntime {
       frameRate: 0,
       getFrameData: () => null,
       faces: [],
-      hands: []
+      hands: [],
+      pose: null,
+      segmentation: null,
+      cv: {}
     },
+    // Additional video streams (index 1+, no CV)
+    streams: [],
     // Interaction APIs will be added during construction
     mouse: {},
     keyboard: {},
     touches: {},
+    // Device sensor APIs (internal device + external devices)
+    device: {
+      motion: null,
+      orientation: null,
+      geolocation: null
+    },
+    devices: [],
     // Parameter helper functions (return parameter objects) - delegate to parameter system
     slider: (defaultValue, config) => {
       return this.parameterSystem.createSliderParameter(defaultValue, config);
@@ -2949,9 +3244,7 @@ class VijiWorkerRuntime {
       this.postMessage(type, data);
     });
     this.interactionSystem = new InteractionSystem();
-    this.videoSystem = new VideoSystem();
     Object.assign(this.viji, this.interactionSystem.getInteractionAPIs());
-    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
     this.viji.audio = {
       ...this.audioState,
       getFrequencyData: () => this.audioState.frequencyData
@@ -3057,16 +3350,38 @@ class VijiWorkerRuntime {
     this.interactionSystem.resetInteractionState();
     this.audioState = {
       isConnected: false,
-      volume: {
+      volume: { current: 0, peak: 0, smoothed: 0 },
       bands: {
-
-        mid: 0,
-        treble: 0,
-        subBass: 0,
+        low: 0,
         lowMid: 0,
+        mid: 0,
         highMid: 0,
-
-
+        high: 0,
+        lowSmoothed: 0,
+        lowMidSmoothed: 0,
+        midSmoothed: 0,
+        highMidSmoothed: 0,
+        highSmoothed: 0
+      },
+      beat: {
+        kick: 0,
+        snare: 0,
+        hat: 0,
+        any: 0,
+        kickSmoothed: 0,
+        snareSmoothed: 0,
+        anySmoothed: 0,
+        triggers: { any: false, kick: false, snare: false, hat: false },
+        bpm: 120,
+        phase: 0,
+        bar: 0,
+        confidence: 0,
+        isLocked: false
+      },
+      spectral: {
+        brightness: 0,
+        flatness: 0,
+        flux: 0
       },
       frequencyData: new Uint8Array(0)
     };
@@ -3074,8 +3389,37 @@ class VijiWorkerRuntime {
       ...this.audioState,
       getFrequencyData: () => this.audioState.frequencyData
     };
-    this.
-
+    this.videoSystems.forEach((vs) => vs.resetVideoState());
+    if (this.videoSystems[0]) {
+      Object.assign(this.viji.video, this.videoSystems[0].getVideoAPI());
+    }
+    this.updateVijiStreams();
+  }
+  /**
+   * Updates viji.streams from videoSystems array
+   * Slices based on whether there's a main stream at index 0
+   */
+  updateVijiStreams() {
+    const startIndex = this.hasMainVideoStream ? 1 : 0;
+    const freshStreams = this.videoSystems.slice(startIndex).map((vs) => {
+      const api = vs?.getVideoAPI() || {
+        isConnected: false,
+        currentFrame: null,
+        frameWidth: 0,
+        frameHeight: 0,
+        frameRate: 0,
+        getFrameData: () => null
+      };
+      return api;
+    });
+    this.viji.streams.length = freshStreams.length;
+    for (let i = 0; i < freshStreams.length; i++) {
+      if (this.viji.streams[i]) {
+        Object.assign(this.viji.streams[i], freshStreams[i]);
+      } else {
+        this.viji.streams[i] = freshStreams[i];
+      }
+    }
   }
   // Send all parameters (from helper functions) to host
   sendAllParametersToHost() {
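
With `updateVijiStreams()` refreshing `viji.streams` each frame, scene code can treat additional inputs as a plain array. A canvas-renderer sketch; `ctx` and the thumbnail layout are illustrative assumptions, the stream fields come from the API object above.

```js
// Draw each connected extra stream as a thumbnail strip along the bottom.
viji.streams.forEach((stream, i) => {
  if (stream.isConnected && stream.currentFrame) {
    const w = 160;
    const h = w * (stream.frameHeight / Math.max(stream.frameWidth, 1));
    ctx.drawImage(stream.currentFrame, i * (w + 8), ctx.canvas.height - h, w, h);
  }
});
```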
@@ -3127,6 +3471,21 @@ class VijiWorkerRuntime {
       case "video-config-update":
         this.handleVideoConfigUpdate(message);
         break;
+      case "video-streams-prepare":
+        this.handleVideoStreamsPrepare(message);
+        break;
+      case "video-frame-direct":
+        this.handleVideoFrameDirect(message);
+        break;
+      case "enable-auto-capture":
+        this.autoCaptureEnabled = message.data.enabled;
+        if (message.data.flipY !== void 0) {
+          this.autoCaptureFormat.flipY = message.data.flipY;
+          this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"} with flipY=${message.data.flipY}`);
+        } else {
+          this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"}`);
+        }
+        break;
       case "mouse-update":
         this.handleMouseUpdate(message);
         break;
@@ -3139,6 +3498,9 @@ class VijiWorkerRuntime {
       case "interaction-enabled":
         this.handleInteractionEnabled(message);
         break;
+      case "device-state-update":
+        this.handleDeviceStateUpdate(message);
+        break;
       case "performance-update":
         this.handlePerformanceUpdate(message);
         break;
@@ -3185,8 +3547,8 @@ class VijiWorkerRuntime {
   handleCVFrameRateUpdate(message) {
     if (message.data && message.data.mode) {
       const sceneProcessingFPS = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
-      if (this.
-      this.
+      if (this.videoSystems[0]) {
+        this.videoSystems[0].handleVideoConfigUpdate({
           cvFrameRate: {
             mode: message.data.mode,
             sceneTargetFPS: sceneProcessingFPS
@@ -3209,7 +3571,12 @@ class VijiWorkerRuntime {
     const totalTime = this.effectiveFrameTimes[this.effectiveFrameTimes.length - 1] - this.effectiveFrameTimes[0];
     const frameCount = this.effectiveFrameTimes.length - 1;
     const effectiveRefreshRate = Math.round(frameCount / totalTime * 1e3);
-    const cvStats = this.
+    const cvStats = this.videoSystems[0]?.getCVStats() || {
+      activeFeatures: [],
+      processingTime: 0,
+      actualFPS: 0,
+      isProcessing: false
+    };
     this.postMessage("performance-update", {
       effectiveRefreshRate,
       frameRateMode: this.frameRateMode,
@@ -3267,10 +3634,23 @@ class VijiWorkerRuntime {
     this.debugLog("Stream update:", message.data);
   }
   handleAudioAnalysisUpdate(message) {
+    const events = message.data.beat.events || [];
+    const triggers = {
+      kick: events.some((e) => e.type === "kick"),
+      snare: events.some((e) => e.type === "snare"),
+      hat: events.some((e) => e.type === "hat"),
+      any: events.length > 0
+    };
     this.audioState = {
       isConnected: message.data.isConnected,
       volume: message.data.volume,
       bands: message.data.bands,
+      beat: {
+        ...message.data.beat,
+        triggers
+        // Add derived triggers (reliable!)
+      },
+      spectral: message.data.spectral,
       frequencyData: new Uint8Array(message.data.frequencyData)
     };
     this.viji.audio = {
@@ -3279,31 +3659,81 @@ class VijiWorkerRuntime {
     };
   }
   handleVideoCanvasSetup(message) {
-
+    const { streamIndex, isMain } = message.data;
+    const index = streamIndex || 0;
+    const videoSystem = new VideoSystem();
+    videoSystem.setDebugMode(this.debugMode);
+    videoSystem.handleCanvasSetup({
       offscreenCanvas: message.data.offscreenCanvas,
       width: message.data.width,
       height: message.data.height,
       timestamp: message.data.timestamp
     });
-
+    this.videoSystems[index] = videoSystem;
+    this.debugLog(`Video system setup at index ${index}, isMain: ${isMain}`);
+    if (index === 0) {
+      Object.assign(this.viji.video, videoSystem.getVideoAPI());
+    } else {
+      this.updateVijiStreams();
+    }
   }
   handleVideoFrameUpdate(message) {
-
-
-
-
-
+    const index = message.data.streamIndex || 0;
+    if (this.videoSystems[index]) {
+      this.videoSystems[index].handleFrameUpdate({
+        imageBitmap: message.data.imageBitmap,
+        timestamp: message.data.timestamp
+      });
+      if (index === 0) {
+        Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+      }
+    }
   }
   handleVideoConfigUpdate(message) {
-
-
-
-
-
-
-
-
-
+    const index = message.data.streamIndex || 0;
+    if (this.videoSystems[index]) {
+      this.videoSystems[index].handleVideoConfigUpdate({
+        ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
+        ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
+        ...message.data.width && { width: message.data.width },
+        ...message.data.height && { height: message.data.height },
+        ...message.data.disconnect && { disconnect: message.data.disconnect },
+        timestamp: message.data.timestamp
+      });
+      if (index === 0) {
+        Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+      }
+    }
+  }
+  handleVideoStreamsPrepare(message) {
+    const { mainStream, mediaStreamCount, directFrameCount } = message.data;
+    const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
+    this.hasMainVideoStream = mainStream;
+    this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
+    while (this.videoSystems.length < totalStreams) {
+      this.videoSystems.push(new VideoSystem());
+    }
+    const directFrameStartIndex = (mainStream ? 1 : 0) + mediaStreamCount;
+    for (let i = 0; i < directFrameCount; i++) {
+      const index = directFrameStartIndex + i;
+      if (!this.videoSystems[index]) {
+        this.videoSystems[index] = new VideoSystem();
+      }
+      this.videoSystems[index].setDebugMode(this.debugMode);
+      this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+    }
+    this.updateVijiStreams();
+    this.debugLog(`Prepared ${totalStreams} video systems (${directFrameCount} direct frames)`);
+  }
+  handleVideoFrameDirect(message) {
+    const index = message.data.streamIndex || 0;
+    if (!this.videoSystems[index]) {
+      this.debugLog(`[Compositor] Creating new VideoSystem at index ${index} for direct frames`);
+      this.videoSystems[index] = new VideoSystem();
+      this.videoSystems[index].setDebugMode(this.debugMode);
+      this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+    }
+    this.videoSystems[index].handleDirectFrame(message.data);
   }
   handlePerformanceUpdate(message) {
     this.debugLog("Performance update:", message.data);
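
The prepare message sizes the `videoSystems` array up front so stream indices stay stable. A hedged host-side sketch; the field names match `handleVideoStreamsPrepare` above, while the `{ type, data }` envelope is an assumption.

```js
// One main CV stream, one extra MediaStream, two direct-frame sources.
worker.postMessage({
  type: "video-streams-prepare",
  data: { mainStream: true, mediaStreamCount: 1, directFrameCount: 2 }
});
// Resulting indices: 0 = main (viji.video, CV enabled), 1 = media stream,
// 2 and 3 = direct-frame consumers; viji.streams[0..2] map to indices 1..3.
```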
@@ -3314,7 +3744,7 @@ class VijiWorkerRuntime {
    */
   async handleCaptureFrame(message) {
     this.pendingCaptures.push(message);
-    this.debugLog(`Capture
+    this.debugLog(`Capture queued: ${message.data.format || "blob"} (${this.pendingCaptures.length} pending)`);
   }
   /**
    * Execute a capture frame request immediately after render completes.
@@ -3325,6 +3755,17 @@ class VijiWorkerRuntime {
     if (!this.canvas) {
       throw new Error("Canvas not initialized");
     }
+    const format = message.data.format || "blob";
+    if (format === "bitmap") {
+      const bitmap = this.canvas.transferToImageBitmap();
+      self.postMessage({
+        type: "capture-frame-result",
+        id: message.id,
+        timestamp: Date.now(),
+        data: { bitmap }
+      }, [bitmap]);
+      return;
+    }
     const mimeType = message.data.type || "image/jpeg";
     const srcWidth = this.canvas.width;
     const srcHeight = this.canvas.height;
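
The new `format: "bitmap"` branch returns a transferred `ImageBitmap` instead of an encoded blob. A hedged request/response sketch; the result type and payload match the `postMessage` call above, while `capture-frame` as the request type, the envelope, and `previewCanvas` are assumptions.

```js
worker.postMessage({ type: "capture-frame", id: 42, data: { format: "bitmap" } });
worker.addEventListener("message", (e) => {
  if (e.data.type === "capture-frame-result" && e.data.data?.bitmap) {
    // e.g. hand the frame to an ImageBitmapRenderingContext without a pixel copy
    previewCanvas.getContext("bitmaprenderer").transferFromImageBitmap(e.data.data.bitmap);
  }
});
```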
@@ -3416,6 +3857,7 @@ class VijiWorkerRuntime {
     if (!this.isRunning) return;
     const currentTime = performance.now();
     this.interactionSystem.frameStart();
+    this.updateVijiStreams();
     this.viji.fps = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
     let shouldRender = true;
     if (this.frameRateMode === "half") {
@@ -3458,6 +3900,29 @@ class VijiWorkerRuntime {
           });
         }
       }
+      if (this.autoCaptureEnabled && this.canvas) {
+        try {
+          const ctx = this.canvas.getContext("2d") || this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
+          if (ctx) {
+            const options = this.autoCaptureFormat.flipY ? { imageOrientation: "flipY" } : {};
+            createImageBitmap(this.canvas, options).then((bitmap) => {
+              self.postMessage({
+                type: "auto-capture-result",
+                timestamp: Date.now(),
+                data: { bitmap, timestamp: performance.now() }
+              }, [bitmap]);
+            }).catch((err) => {
+              console.warn("[AutoCapture] ImageBitmap creation failed:", err);
+            });
+          } else {
+            if (this.debugMode && this.frameCount % 60 === 0) {
+              this.debugLog("[AutoCapture] No context yet, skipping capture");
+            }
+          }
+        } catch (error) {
+          console.warn("[AutoCapture] Failed:", error);
+        }
+      }
     }
     this.reportPerformanceStats(currentTime);
     requestAnimationFrame(() => this.renderFrame());
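
Auto-capture turns one worker into a frame source for another: every rendered frame is re-emitted as a transferred bitmap. A sketch wiring the two message types together; both types appear in this diff, while the envelope and the producer/consumer worker pair are assumptions.

```js
producerWorker.postMessage({ type: "enable-auto-capture", data: { enabled: true, flipY: true } });
producerWorker.addEventListener("message", (e) => {
  if (e.data.type === "auto-capture-result") {
    const { bitmap, timestamp } = e.data.data;
    // Forward into a consumer scene as a direct-frame stream (see handleVideoFrameDirect).
    consumerWorker.postMessage(
      { type: "video-frame-direct", data: { streamIndex: 0, imageBitmap: bitmap, timestamp } },
      [bitmap]
    );
  }
});
```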
@@ -3483,6 +3948,11 @@ class VijiWorkerRuntime {
   handleInteractionEnabled(message) {
     this.interactionSystem.setInteractionEnabled(message.data.enabled);
   }
+  handleDeviceStateUpdate(message) {
+    this.deviceState = message.data;
+    this.viji.device = this.deviceState.device;
+    this.viji.devices = this.deviceState.devices;
+  }
 }
 class SceneAnalyzer {
   /**
@@ -3527,8 +3997,10 @@ async function setSceneCode(sceneCode) {
     runtime.sendAllParametersToHost();
   } else {
     const functionBody = sceneCode + '\nif (typeof render === "function") {\n return render;\n}\nthrow new Error("Scene code must define a render function");';
-    const
-
+    const AsyncFunction = Object.getPrototypeOf(async function() {
+    }).constructor;
+    const sceneFunction = new AsyncFunction("viji", functionBody);
+    renderFunction = await sceneFunction(runtime.viji);
     self.renderFunction = renderFunction;
     runtime.sendAllParametersToHost();
   }
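
Compiling scene code with `AsyncFunction` instead of `Function` means a scene can `await` during setup (asset loads, dynamic imports) before returning `render`. The same trick in isolation; `vijiStub` is a hypothetical stand-in for `runtime.viji`.

```js
const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor;
const factory = new AsyncFunction("viji", `
  await new Promise((resolve) => setTimeout(resolve, 0)); // e.g. async asset setup
  function render() { /* draw using viji */ }
  return render;
`);
const render = await factory(vijiStub); // resolves once the scene's top-level awaits finish
```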
@@ -3546,4 +4018,4 @@ async function setSceneCode(sceneCode) {
   }
 }
 self.setSceneCode = setSceneCode;
-//# sourceMappingURL=viji.worker-
+//# sourceMappingURL=viji.worker-b3XR7zKX.js.map