@viji-dev/core 0.2.20 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +153 -32
- package/dist/artist-dts-p5.js +1 -1
- package/dist/artist-dts.js +1 -1
- package/dist/artist-global.d.ts +114 -13
- package/dist/artist-js-ambient.d.ts +43 -1
- package/dist/artist-jsdoc.d.ts +43 -1
- package/dist/assets/{viji.worker-BANvuSYW.js → viji.worker-Be0jZvYj.js} +709 -61
- package/dist/assets/viji.worker-Be0jZvYj.js.map +1 -0
- package/dist/assets/wasm/essentia-wasm.web.wasm +0 -0
- package/dist/essentia-wasm.web-D7gmeaO3.js +5696 -0
- package/dist/essentia-wasm.web-D7gmeaO3.js.map +1 -0
- package/dist/essentia.js-core.es-DnrJE0uR.js +3174 -0
- package/dist/essentia.js-core.es-DnrJE0uR.js.map +1 -0
- package/dist/index-BV1S8Ps-.js +16180 -0
- package/dist/index-BV1S8Ps-.js.map +1 -0
- package/dist/index.d.ts +1128 -36
- package/dist/index.js +5 -2613
- package/dist/index.js.map +1 -1
- package/dist/shader-uniforms.js +365 -0
- package/package.json +5 -2
- package/dist/assets/viji.worker-BANvuSYW.js.map +0 -1
@@ -1241,6 +1241,8 @@ class VideoSystem {
   cvScratchContext = null;
   // Debug logging control
   debugMode = false;
+  // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
+  directGPUMode = false;
   /**
    * Enable or disable debug logging
    */
@@ -1286,9 +1288,13 @@ class VideoSystem {
    */
   getVideoAPI() {
     const cvResults = this.cvSystem.getResults();
+    const currentFrame = this.videoState.currentFrame;
+    if (this.directGPUMode && currentFrame instanceof ImageBitmap) {
+      this.videoState.currentFrame = null;
+    }
     return {
       isConnected: this.videoState.isConnected,
-      currentFrame
+      currentFrame,
       frameWidth: this.videoState.frameWidth,
       frameHeight: this.videoState.frameHeight,
       frameRate: this.videoState.frameRate,
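Note: this change makes ImageBitmap frames consume-once in direct GPU mode - the stored reference is cleared as soon as the API object hands the bitmap out. A minimal sketch of the intended consumer, assuming a caller that uploads the frame to a WebGL texture (uploadToTexture is illustrative, not part of the package):

    const api = videoSystem.getVideoAPI();   // takes ownership of the bitmap
    if (api.currentFrame instanceof ImageBitmap) {
      uploadToTexture(gl, api.currentFrame); // e.g. gl.texImage2D(..., api.currentFrame)
      api.currentFrame.close();              // frees the bitmap's backing memory
    }
    // A later getVideoAPI() call now sees currentFrame === null instead of a
    // closed (and therefore unusable) bitmap.
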
@@ -1526,6 +1532,54 @@ class VideoSystem {
   getCVStats() {
     return this.cvSystem.getStats();
   }
+  /**
+   * Initialize for direct frame injection (no MediaStream)
+   * Enables GPU-only mode for zero-copy pipeline (ImageBitmaps are pre-flipped at capture)
+   */
+  initializeForDirectFrames(consumerRendererType) {
+    this.disconnectVideo();
+    this.directGPUMode = consumerRendererType === "shader";
+    if (this.directGPUMode) {
+      this.debugLog("VideoSystem initialized in direct GPU mode (zero-copy, pre-flipped ImageBitmaps)");
+    } else {
+      this.offscreenCanvas = new OffscreenCanvas(1920, 1080);
+      this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+      if (!this.ctx) {
+        throw new Error("Failed to get 2D context for direct frames");
+      }
+      this.videoState.currentFrame = this.offscreenCanvas;
+      this.debugLog("VideoSystem initialized with canvas (P5/Canvas consumer)");
+    }
+    this.videoState.isConnected = false;
+  }
+  /**
+   * Handle directly injected frame (zero-copy)
+   */
+  handleDirectFrame(data) {
+    if (this.directGPUMode) {
+      this.videoState.currentFrame = data.imageBitmap;
+      this.videoState.frameWidth = data.imageBitmap.width;
+      this.videoState.frameHeight = data.imageBitmap.height;
+      this.videoState.isConnected = true;
+    } else {
+      if (!this.offscreenCanvas || !this.ctx) {
+        this.debugLog("[Compositor] Initializing canvas for direct frames");
+        this.offscreenCanvas = new OffscreenCanvas(data.imageBitmap.width, data.imageBitmap.height);
+        this.ctx = this.offscreenCanvas.getContext("2d", { willReadFrequently: true });
+      }
+      if (this.offscreenCanvas.width !== data.imageBitmap.width || this.offscreenCanvas.height !== data.imageBitmap.height) {
+        this.offscreenCanvas.width = data.imageBitmap.width;
+        this.offscreenCanvas.height = data.imageBitmap.height;
+      }
+      this.ctx.drawImage(data.imageBitmap, 0, 0);
+      this.videoState.currentFrame = this.offscreenCanvas;
+      this.videoState.frameWidth = data.imageBitmap.width;
+      this.videoState.frameHeight = data.imageBitmap.height;
+      this.videoState.isConnected = true;
+      this.processCurrentFrame(data.timestamp);
+      data.imageBitmap.close();
+    }
+  }
 }
 class P5WorkerAdapter {
   constructor(offscreenCanvas, _vijiAPI, sceneCode) {
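Note: handleDirectFrame receives { streamIndex, imageBitmap, timestamp } via the "video-frame-direct" message handled further down in this diff. A host-side sketch of the zero-copy injection, written inside an async context (the worker variable and stream index are illustrative):

    const bitmap = await createImageBitmap(sourceCanvas, { imageOrientation: "flipY" }); // pre-flip for WebGL consumers
    worker.postMessage(
      { type: "video-frame-direct", data: { streamIndex: 1, imageBitmap: bitmap, timestamp: performance.now() } },
      [bitmap] // transfer the bitmap instead of structured-cloning it
    );
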
@@ -1770,6 +1824,49 @@ class P5WorkerAdapter {
       return null;
     }
   }
+  /**
+   * Wrap video frames in P5.js-compatible format
+   * P5.js expects images to have {canvas, elt, width, height} structure
+   * This wrapping is done per-frame for P5 scenes only, keeping the artist API unchanged
+   */
+  wrapVideoFramesForP5(vijiAPI) {
+    if (vijiAPI.video?.currentFrame instanceof OffscreenCanvas) {
+      const canvas = vijiAPI.video.currentFrame;
+      vijiAPI.video.currentFrame = {
+        canvas,
+        elt: canvas,
+        width: canvas.width,
+        height: canvas.height
+      };
+    } else if (vijiAPI.video?.currentFrame instanceof ImageBitmap) {
+      const bitmap = vijiAPI.video.currentFrame;
+      vijiAPI.video.currentFrame = {
+        elt: bitmap,
+        width: bitmap.width,
+        height: bitmap.height
+      };
+    }
+    if (Array.isArray(vijiAPI.streams)) {
+      for (const stream of vijiAPI.streams) {
+        if (stream?.currentFrame instanceof OffscreenCanvas) {
+          const canvas = stream.currentFrame;
+          stream.currentFrame = {
+            canvas,
+            elt: canvas,
+            width: canvas.width,
+            height: canvas.height
+          };
+        } else if (stream?.currentFrame instanceof ImageBitmap) {
+          const bitmap = stream.currentFrame;
+          stream.currentFrame = {
+            elt: bitmap,
+            width: bitmap.width,
+            height: bitmap.height
+          };
+        }
+      }
+    }
+  }
   /**
    * Add .p5 property to image parameters for P5.js-specific rendering
    * This allows artists to use p5.image() while keeping .value for native canvas API
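Note: the wrapper fakes the p5.Element-like shape ({elt, width, height}, plus .canvas when one exists) that p5.js drawing functions know how to unwrap. A hypothetical P5 scene can then hand the frame straight to p5.image(); the render signature mirrors the adapter's setupFn(vijiAPI, p5Instance) call and is otherwise illustrative:

    function render(viji, p5) {
      if (viji.video.isConnected && viji.video.currentFrame) {
        p5.image(viji.video.currentFrame, 0, 0, p5.width, p5.height);
      }
    }
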
@@ -1810,6 +1907,7 @@ class P5WorkerAdapter {
     }
     try {
       this.addP5PropertyToImageParameters(parameterObjects);
+      this.wrapVideoFramesForP5(vijiAPI);
       if (!this.artistSetupComplete && this.setupFn) {
         this.setupFn(vijiAPI, this.p5Instance);
         this.artistSetupComplete = true;
@@ -2032,6 +2130,10 @@ class ShaderWorkerAdapter {
       this.gl = gl;
     }
   }
+  static MAX_STREAMS = 8;
+  // Maximum number of compositor input streams
+  static MAX_EXTERNAL_DEVICES = 8;
+  // Maximum number of external devices
   gl;
   program = null;
   uniformLocations = /* @__PURE__ */ new Map();
@@ -2048,6 +2150,8 @@ class ShaderWorkerAdapter {
   audioFFTTexture = null;
   videoTexture = null;
   segmentationTexture = null;
+  // Multi-stream textures
+  streamTextures = [];
   // Backbuffer support (ping-pong framebuffers)
   backbufferFramebuffer = null;
   backbufferTexture = null;
@@ -2271,6 +2375,33 @@ uniform sampler2D u_video; // Current video frame as texture
 uniform vec2 u_videoResolution; // Video frame width and height in pixels
 uniform float u_videoFrameRate; // Video frame rate in frames per second
 
+// Multi-Stream Compositor Support (using individual uniforms due to WebGL 1.0 limitations)
+uniform int u_streamCount; // Number of available compositor input streams (0-8)
+uniform sampler2D u_stream0; // Stream 0 texture
+uniform sampler2D u_stream1; // Stream 1 texture
+uniform sampler2D u_stream2; // Stream 2 texture
+uniform sampler2D u_stream3; // Stream 3 texture
+uniform sampler2D u_stream4; // Stream 4 texture
+uniform sampler2D u_stream5; // Stream 5 texture
+uniform sampler2D u_stream6; // Stream 6 texture
+uniform sampler2D u_stream7; // Stream 7 texture
+uniform vec2 u_stream0Resolution; // Stream 0 resolution
+uniform vec2 u_stream1Resolution; // Stream 1 resolution
+uniform vec2 u_stream2Resolution; // Stream 2 resolution
+uniform vec2 u_stream3Resolution; // Stream 3 resolution
+uniform vec2 u_stream4Resolution; // Stream 4 resolution
+uniform vec2 u_stream5Resolution; // Stream 5 resolution
+uniform vec2 u_stream6Resolution; // Stream 6 resolution
+uniform vec2 u_stream7Resolution; // Stream 7 resolution
+uniform bool u_stream0Connected; // Stream 0 connection status
+uniform bool u_stream1Connected; // Stream 1 connection status
+uniform bool u_stream2Connected; // Stream 2 connection status
+uniform bool u_stream3Connected; // Stream 3 connection status
+uniform bool u_stream4Connected; // Stream 4 connection status
+uniform bool u_stream5Connected; // Stream 5 connection status
+uniform bool u_stream6Connected; // Stream 6 connection status
+uniform bool u_stream7Connected; // Stream 7 connection status
+
 // CV - Face Detection
 uniform int u_faceCount; // Number of detected faces (0-1)
 uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
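Note: GLSL ES 1.00 (WebGL 1.0) only permits indexing an array of samplers with a constant expression, so a `sampler2D u_streams[8]` indexed by a runtime variable would not compile - hence the eight numbered uniforms. A hypothetical scene fragment shader, written as the JS template literal these scenes use, crossfading the first two streams:

    const fragmentSource = `
    precision mediump float;
    uniform sampler2D u_stream0;
    uniform sampler2D u_stream1;
    uniform bool u_stream1Connected;
    uniform vec2 u_stream0Resolution;

    void main() {
      vec2 uv = gl_FragCoord.xy / u_stream0Resolution;
      vec4 a = texture2D(u_stream0, uv);
      vec4 b = u_stream1Connected ? texture2D(u_stream1, uv) : a;
      gl_FragColor = mix(a, b, 0.5); // 50/50 blend once stream 1 connects
    }`;
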
@@ -2302,6 +2433,58 @@ uniform vec2 u_rightAnklePosition; // Right ankle landmark position in pixels
 uniform sampler2D u_segmentationMask; // Body segmentation mask texture (0=background, 1=person)
 uniform vec2 u_segmentationRes; // Segmentation mask resolution in pixels
 
+// Device Sensors - Internal Device (viji.device)
+uniform vec3 u_deviceAcceleration; // Acceleration without gravity (m/s²) - x, y, z
+uniform vec3 u_deviceAccelerationGravity; // Acceleration with gravity (m/s²) - x, y, z
+uniform vec3 u_deviceRotationRate; // Rotation rate (deg/s) - alpha, beta, gamma
+uniform vec3 u_deviceOrientation; // Device orientation (degrees) - alpha, beta, gamma
+uniform bool u_deviceOrientationAbsolute; // True if orientation uses magnetometer (compass)
+uniform vec2 u_deviceLocation; // Geolocation (degrees) - latitude, longitude
+uniform vec4 u_deviceLocationExt; // Extended geolocation - altitude(m), accuracy(m), heading(deg), speed(m/s)
+
+// Device Sensors - External Devices (viji.devices[0-7])
+uniform int u_externalDeviceCount; // Number of connected external devices (0-8)
+uniform vec3 u_device0Acceleration; // Device 0 acceleration without gravity (m/s²)
+uniform vec3 u_device0AccelerationGravity; // Device 0 acceleration with gravity (m/s²)
+uniform vec3 u_device0RotationRate; // Device 0 rotation rate (deg/s)
+uniform vec3 u_device0Orientation; // Device 0 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device0Location; // Device 0 location - latitude, longitude
+uniform vec3 u_device1Acceleration; // Device 1 acceleration without gravity (m/s²)
+uniform vec3 u_device1AccelerationGravity; // Device 1 acceleration with gravity (m/s²)
+uniform vec3 u_device1RotationRate; // Device 1 rotation rate (deg/s)
+uniform vec3 u_device1Orientation; // Device 1 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device1Location; // Device 1 location - latitude, longitude
+uniform vec3 u_device2Acceleration; // Device 2 acceleration without gravity (m/s²)
+uniform vec3 u_device2AccelerationGravity; // Device 2 acceleration with gravity (m/s²)
+uniform vec3 u_device2RotationRate; // Device 2 rotation rate (deg/s)
+uniform vec3 u_device2Orientation; // Device 2 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device2Location; // Device 2 location - latitude, longitude
+uniform vec3 u_device3Acceleration; // Device 3 acceleration without gravity (m/s²)
+uniform vec3 u_device3AccelerationGravity; // Device 3 acceleration with gravity (m/s²)
+uniform vec3 u_device3RotationRate; // Device 3 rotation rate (deg/s)
+uniform vec3 u_device3Orientation; // Device 3 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device3Location; // Device 3 location - latitude, longitude
+uniform vec3 u_device4Acceleration; // Device 4 acceleration without gravity (m/s²)
+uniform vec3 u_device4AccelerationGravity; // Device 4 acceleration with gravity (m/s²)
+uniform vec3 u_device4RotationRate; // Device 4 rotation rate (deg/s)
+uniform vec3 u_device4Orientation; // Device 4 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device4Location; // Device 4 location - latitude, longitude
+uniform vec3 u_device5Acceleration; // Device 5 acceleration without gravity (m/s²)
+uniform vec3 u_device5AccelerationGravity; // Device 5 acceleration with gravity (m/s²)
+uniform vec3 u_device5RotationRate; // Device 5 rotation rate (deg/s)
+uniform vec3 u_device5Orientation; // Device 5 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device5Location; // Device 5 location - latitude, longitude
+uniform vec3 u_device6Acceleration; // Device 6 acceleration without gravity (m/s²)
+uniform vec3 u_device6AccelerationGravity; // Device 6 acceleration with gravity (m/s²)
+uniform vec3 u_device6RotationRate; // Device 6 rotation rate (deg/s)
+uniform vec3 u_device6Orientation; // Device 6 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device6Location; // Device 6 location - latitude, longitude
+uniform vec3 u_device7Acceleration; // Device 7 acceleration without gravity (m/s²)
+uniform vec3 u_device7AccelerationGravity; // Device 7 acceleration with gravity (m/s²)
+uniform vec3 u_device7RotationRate; // Device 7 rotation rate (deg/s)
+uniform vec3 u_device7Orientation; // Device 7 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device7Location; // Device 7 location - latitude, longitude
+
 // Backbuffer (previous frame feedback)
 ${this.backbufferEnabled ? "uniform sampler2D backbuffer; // Previous frame texture for feedback effects" : "// backbuffer not enabled"}
 `;
@@ -2364,13 +2547,22 @@ ${error}`);
     if (!this.program) return;
     const gl = this.gl;
     const numUniforms = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
+    const streamUniforms = [];
     for (let i = 0; i < numUniforms; i++) {
       const info = gl.getActiveUniform(this.program, i);
       if (info) {
         const location = gl.getUniformLocation(this.program, info.name);
         this.uniformLocations.set(info.name, location);
+        if (info.name.includes("stream") || info.name.includes("u_stream")) {
+          streamUniforms.push(info.name);
+        }
       }
     }
+    if (streamUniforms.length > 0) {
+      console.log("[ShaderAdapter] Stream-related uniforms found:", streamUniforms);
+    } else {
+      console.log("[ShaderAdapter] NO stream-related uniforms found in shader!");
+    }
   }
   /**
    * Reserve texture units for special textures
@@ -2379,6 +2571,9 @@ ${error}`);
     this.textureUnits.set("u_audioFFT", this.nextTextureUnit++);
     this.textureUnits.set("u_video", this.nextTextureUnit++);
     this.textureUnits.set("u_segmentationMask", this.nextTextureUnit++);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+      this.textureUnits.set(`u_stream${i}`, this.nextTextureUnit++);
+    }
     if (this.backbufferEnabled) {
       this.textureUnits.set("backbuffer", this.nextTextureUnit++);
     }
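Note: this fixed reservation now claims 11 texture units before any user textures (FFT + video + segmentation mask + 8 streams), 12 with the backbuffer, while OpenGL ES 2.0 / WebGL 1.0 only guarantees MAX_TEXTURE_IMAGE_UNITS >= 8. On minimal GPUs the high-numbered stream units can exceed the budget; a defensive check (illustrative, not in the package) would be:

    const maxUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);
    if (maxUnits < 12) {
      console.warn(`[ShaderAdapter] only ${maxUnits} texture units; high-numbered u_stream samplers may not bind`);
    }
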
@@ -2490,16 +2685,16 @@ ${error}`);
       }
     }
     const audio = viji.audio;
-    this.setUniform("u_audioVolume", "float", audio.volume?.
+    this.setUniform("u_audioVolume", "float", audio.volume?.current || 0);
     this.setUniform("u_audioPeak", "float", audio.volume?.peak || 0);
-    this.setUniform("u_audioBass", "float", audio.bands?.
+    this.setUniform("u_audioBass", "float", audio.bands?.low || 0);
     this.setUniform("u_audioMid", "float", audio.bands?.mid || 0);
-    this.setUniform("u_audioTreble", "float", audio.bands?.
-    this.setUniform("u_audioSubBass", "float", audio.bands?.
+    this.setUniform("u_audioTreble", "float", audio.bands?.high || 0);
+    this.setUniform("u_audioSubBass", "float", audio.bands?.low || 0);
     this.setUniform("u_audioLowMid", "float", audio.bands?.lowMid || 0);
     this.setUniform("u_audioHighMid", "float", audio.bands?.highMid || 0);
-    this.setUniform("u_audioPresence", "float", audio.bands?.
-    this.setUniform("u_audioBrilliance", "float", audio.bands?.
+    this.setUniform("u_audioPresence", "float", audio.bands?.highMid || 0);
+    this.setUniform("u_audioBrilliance", "float", audio.bands?.high || 0);
     if (audio.isConnected) {
       this.updateAudioFFTTexture(audio.getFrequencyData());
     }
@@ -2512,6 +2707,25 @@ ${error}`);
       this.setUniform("u_videoResolution", "vec2", [0, 0]);
       this.setUniform("u_videoFrameRate", "float", 0);
     }
+    const streams = viji.streams || [];
+    const streamCount = Math.min(streams.length, ShaderWorkerAdapter.MAX_STREAMS);
+    this.setUniform("u_streamCount", "int", streamCount);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_STREAMS; i++) {
+      const connectedUniform = `u_stream${i}Connected`;
+      const resolutionUniform = `u_stream${i}Resolution`;
+      if (i < streamCount && streams[i]?.isConnected && streams[i]?.currentFrame) {
+        this.updateStreamTexture(i, streams[i].currentFrame);
+        this.setUniform(
+          resolutionUniform,
+          "vec2",
+          [streams[i].frameWidth, streams[i].frameHeight]
+        );
+        this.setUniform(connectedUniform, "bool", true);
+      } else {
+        this.setUniform(resolutionUniform, "vec2", [0, 0]);
+        this.setUniform(connectedUniform, "bool", false);
+      }
+    }
     const faces = video.faces || [];
     this.setUniform("u_faceCount", "int", faces.length);
     if (faces.length > 0) {
@@ -2581,6 +2795,134 @@ ${error}`);
     } else {
       this.setUniform("u_segmentationRes", "vec2", [0, 0]);
     }
+    const internalDevice = viji.device;
+    if (internalDevice) {
+      const motion = internalDevice.motion;
+      if (motion?.acceleration) {
+        this.setUniform("u_deviceAcceleration", "vec3", [
+          motion.acceleration.x ?? 0,
+          motion.acceleration.y ?? 0,
+          motion.acceleration.z ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
+      }
+      if (motion?.accelerationIncludingGravity) {
+        this.setUniform("u_deviceAccelerationGravity", "vec3", [
+          motion.accelerationIncludingGravity.x ?? 0,
+          motion.accelerationIncludingGravity.y ?? 0,
+          motion.accelerationIncludingGravity.z ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
+      }
+      if (motion?.rotationRate) {
+        this.setUniform("u_deviceRotationRate", "vec3", [
+          motion.rotationRate.alpha ?? 0,
+          motion.rotationRate.beta ?? 0,
+          motion.rotationRate.gamma ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
+      }
+      const orientation = internalDevice.orientation;
+      if (orientation) {
+        this.setUniform("u_deviceOrientation", "vec3", [
+          orientation.alpha ?? 0,
+          orientation.beta ?? 0,
+          orientation.gamma ?? 0
+        ]);
+        this.setUniform("u_deviceOrientationAbsolute", "bool", orientation.absolute);
+      } else {
+        this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
+        this.setUniform("u_deviceOrientationAbsolute", "bool", false);
+      }
+      const geolocation = internalDevice.geolocation;
+      if (geolocation) {
+        this.setUniform("u_deviceLocation", "vec2", [
+          geolocation.latitude ?? 0,
+          geolocation.longitude ?? 0
+        ]);
+        this.setUniform("u_deviceLocationExt", "vec4", [
+          geolocation.altitude ?? 0,
+          geolocation.accuracy ?? 0,
+          geolocation.heading ?? 0,
+          geolocation.speed ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceLocation", "vec2", [0, 0]);
+        this.setUniform("u_deviceLocationExt", "vec4", [0, 0, 0, 0]);
+      }
+    } else {
+      this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceOrientationAbsolute", "bool", false);
+      this.setUniform("u_deviceLocation", "vec2", [0, 0]);
+      this.setUniform("u_deviceLocationExt", "vec4", [0, 0, 0, 0]);
+    }
+    const externalDevices = viji.devices || [];
+    const externalDeviceCount = Math.min(externalDevices.length, ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES);
+    this.setUniform("u_externalDeviceCount", "int", externalDeviceCount);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES; i++) {
+      if (i < externalDeviceCount) {
+        const device = externalDevices[i];
+        const motion = device.motion;
+        if (motion?.acceleration) {
+          this.setUniform(`u_device${i}Acceleration`, "vec3", [
+            motion.acceleration.x ?? 0,
+            motion.acceleration.y ?? 0,
+            motion.acceleration.z ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
+        }
+        if (motion?.accelerationIncludingGravity) {
+          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [
+            motion.accelerationIncludingGravity.x ?? 0,
+            motion.accelerationIncludingGravity.y ?? 0,
+            motion.accelerationIncludingGravity.z ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
+        }
+        if (motion?.rotationRate) {
+          this.setUniform(`u_device${i}RotationRate`, "vec3", [
+            motion.rotationRate.alpha ?? 0,
+            motion.rotationRate.beta ?? 0,
+            motion.rotationRate.gamma ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
+        }
+        const orientation = device.orientation;
+        if (orientation) {
+          this.setUniform(`u_device${i}Orientation`, "vec3", [
+            orientation.alpha ?? 0,
+            orientation.beta ?? 0,
+            orientation.gamma ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
+        }
+        const geolocation = device.geolocation;
+        if (geolocation) {
+          this.setUniform(`u_device${i}Location`, "vec2", [
+            geolocation.latitude ?? 0,
+            geolocation.longitude ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Location`, "vec2", [0, 0]);
+        }
+      } else {
+        this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}Location`, "vec2", [0, 0]);
+      }
+    }
   }
   /**
    * Update parameter uniforms from parameter objects
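Note: a sketch of consuming the new sensor uniforms from scene GLSL, written as the JS template literal these scenes use. The mapping assumes a device held upright, where gamma is roughly the -90..90 degree left/right tilt:

    const tiltSource = `
    precision mediump float;
    uniform vec3 u_deviceOrientation; // alpha, beta, gamma in degrees

    void main() {
      float tilt = clamp(u_deviceOrientation.z / 90.0, -1.0, 1.0); // gamma
      // tilt left shades red, tilt right shades blue
      gl_FragColor = vec4(0.5 - 0.5 * tilt, 0.2, 0.5 + 0.5 * tilt, 1.0);
    }`;
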
@@ -2588,7 +2930,12 @@ ${error}`);
   updateParameterUniforms(parameterObjects) {
     for (const param of this.parameters) {
       const paramObj = parameterObjects.get(param.uniformName);
-      if (!paramObj)
+      if (!paramObj) {
+        if (Math.random() < 0.01) {
+          console.log(`[ShaderAdapter] Parameter '${param.uniformName}' not found in parameterObjects`);
+        }
+        continue;
+      }
       const value = paramObj.value;
       switch (param.type) {
         case "slider":
@@ -2620,6 +2967,9 @@ ${error}`);
   setUniform(name, type, value) {
     const location = this.uniformLocations.get(name);
     if (location === null || location === void 0) {
+      if (name.includes("[") && Math.random() < 0.01) {
+        console.log(`[ShaderAdapter] Uniform '${name}' not found (location is ${location})`);
+      }
       return;
     }
     const gl = this.gl;
@@ -2691,6 +3041,7 @@ ${error}`);
   }
   /**
    * Update video texture
+   * Supports both OffscreenCanvas and ImageBitmap
    */
   updateVideoTexture(videoFrame) {
     const gl = this.gl;
@@ -2700,6 +3051,10 @@ ${error}`);
     }
     gl.activeTexture(gl.TEXTURE0 + unit);
     gl.bindTexture(gl.TEXTURE_2D, this.videoTexture);
+    const shouldFlip = videoFrame instanceof OffscreenCanvas;
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
     gl.texImage2D(
       gl.TEXTURE_2D,
       0,
@@ -2708,6 +3063,9 @@ ${error}`);
       gl.UNSIGNED_BYTE,
       videoFrame
     );
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+    }
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
     gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
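Note: the asymmetry is deliberate. Canvas pixels are stored top-row-first while WebGL texture coordinates start at the bottom, so OffscreenCanvas uploads ask the driver to flip rows. ImageBitmap uploads skip it both because this release pre-flips bitmaps at capture and because the WebGL spec ignores UNPACK_FLIP_Y_WEBGL for ImageBitmap sources - their orientation has to be baked in at creation:

    // capture side (see the auto-capture path later in this diff)
    const bitmap = await createImageBitmap(canvas, { imageOrientation: "flipY" });
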
@@ -2716,6 +3074,49 @@ ${error}`);
     if (location) {
       gl.uniform1i(location, unit);
     }
+    if (videoFrame instanceof ImageBitmap) {
+      videoFrame.close();
+    }
+  }
+  /**
+   * Update compositor stream texture at specified index
+   * Supports both OffscreenCanvas and ImageBitmap for zero-copy pipeline
+   */
+  updateStreamTexture(index, streamFrame) {
+    const gl = this.gl;
+    const uniformName = `u_stream${index}`;
+    const unit = this.textureUnits.get(uniformName);
+    if (!this.streamTextures[index]) {
+      this.streamTextures[index] = gl.createTexture();
+    }
+    gl.activeTexture(gl.TEXTURE0 + unit);
+    gl.bindTexture(gl.TEXTURE_2D, this.streamTextures[index]);
+    const shouldFlip = streamFrame instanceof OffscreenCanvas;
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
+    gl.texImage2D(
+      gl.TEXTURE_2D,
+      0,
+      gl.RGBA,
+      gl.RGBA,
+      gl.UNSIGNED_BYTE,
+      streamFrame
+    );
+    if (shouldFlip) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+    }
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+    const location = this.uniformLocations.get(uniformName);
+    if (location) {
+      gl.uniform1i(location, unit);
+    }
+    if (streamFrame instanceof ImageBitmap) {
+      streamFrame.close();
+    }
   }
   /**
    * Update segmentation mask texture
@@ -2800,6 +3201,32 @@ ${error}`);
   getParameterDefinitions() {
     return this.parameters;
   }
+  /**
+   * Cleanup resources
+   */
+  destroy() {
+    const gl = this.gl;
+    if (this.program) {
+      gl.deleteProgram(this.program);
+      this.program = null;
+    }
+    if (this.audioFFTTexture) gl.deleteTexture(this.audioFFTTexture);
+    if (this.videoTexture) gl.deleteTexture(this.videoTexture);
+    if (this.segmentationTexture) gl.deleteTexture(this.segmentationTexture);
+    for (const texture of this.streamTextures) {
+      if (texture) gl.deleteTexture(texture);
+    }
+    this.streamTextures = [];
+    for (const texture of this.textures.values()) {
+      if (texture) gl.deleteTexture(texture);
+    }
+    this.textures.clear();
+    if (this.backbufferFramebuffer) gl.deleteFramebuffer(this.backbufferFramebuffer);
+    if (this.backbufferTexture) gl.deleteTexture(this.backbufferTexture);
+    if (this.currentFramebuffer) gl.deleteFramebuffer(this.currentFramebuffer);
+    if (this.currentTexture) gl.deleteTexture(this.currentTexture);
+    if (this.quadBuffer) gl.deleteBuffer(this.quadBuffer);
+  }
 }
 class VijiWorkerRuntime {
   canvas = null;
@@ -2827,7 +3254,7 @@ class VijiWorkerRuntime {
    */
   setDebugMode(enabled) {
     this.debugMode = enabled;
-
+    this.videoSystems.forEach((vs) => vs?.setDebugMode(enabled));
     if (this.parameterSystem && "setDebugMode" in this.parameterSystem) {
       this.parameterSystem.setDebugMode(enabled);
     }
@@ -2852,24 +3279,61 @@ class VijiWorkerRuntime {
   parameterSystem;
   // Interaction system (Phase 7)
   interactionSystem;
-  // Video
-
+  // Video systems (multi-stream) - index 0 = main with CV, 1+ = additional without CV
+  videoSystems = [];
+  hasMainVideoStream = false;
+  // Track if videoSystems[0] is main stream
+  // Auto-capture for frame sources
+  autoCaptureEnabled = false;
+  autoCaptureFormat = { flipY: true };
+  // Default: flip for WebGL compatibility
   // Audio state (Phase 5) - receives analysis results from host
   audioState = {
     isConnected: false,
-    volume: {
+    volume: { current: 0, peak: 0, smoothed: 0 },
     bands: {
-
-      mid: 0,
-      treble: 0,
-      subBass: 0,
+      low: 0,
       lowMid: 0,
+      mid: 0,
      highMid: 0,
-
-
+      high: 0,
+      lowSmoothed: 0,
+      lowMidSmoothed: 0,
+      midSmoothed: 0,
+      highMidSmoothed: 0,
+      highSmoothed: 0
+    },
+    beat: {
+      kick: 0,
+      snare: 0,
+      hat: 0,
+      any: 0,
+      kickSmoothed: 0,
+      snareSmoothed: 0,
+      anySmoothed: 0,
+      triggers: { any: false, kick: false, snare: false, hat: false },
+      bpm: 120,
+      phase: 0,
+      bar: 0,
+      confidence: 0,
+      isLocked: false
+    },
+    spectral: {
+      brightness: 0,
+      flatness: 0,
+      flux: 0
    },
    frequencyData: new Uint8Array(0)
  };
+  // Device sensor state (internal device + external devices)
+  deviceState = {
+    device: {
+      motion: null,
+      orientation: null,
+      geolocation: null
+    },
+    devices: []
+  };
   // Video state is now managed by the worker-side VideoSystem
   // Artist API object
   viji = {
@@ -2887,6 +3351,7 @@ class VijiWorkerRuntime {
     fps: 60,
     // Audio API (Phase 5) - will be set in constructor
     audio: {},
+    // Main video stream (index 0, CV enabled)
     video: {
       isConnected: false,
       currentFrame: null,
@@ -2895,12 +3360,24 @@ class VijiWorkerRuntime {
       frameRate: 0,
       getFrameData: () => null,
       faces: [],
-      hands: []
+      hands: [],
+      pose: null,
+      segmentation: null,
+      cv: {}
     },
+    // Additional video streams (index 1+, no CV)
+    streams: [],
     // Interaction APIs will be added during construction
     mouse: {},
     keyboard: {},
     touches: {},
+    // Device sensor APIs (internal device + external devices)
+    device: {
+      motion: null,
+      orientation: null,
+      geolocation: null
+    },
+    devices: [],
     // Parameter helper functions (return parameter objects) - delegate to parameter system
     slider: (defaultValue, config) => {
       return this.parameterSystem.createSliderParameter(defaultValue, config);
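Note: with this shape, viji.video stays the single CV-enabled input while viji.streams[i] are plain frame sources. A hypothetical canvas scene drawing a thumbnail strip of every connected stream (the render signature and ctx parameter are illustrative):

    function render(viji, ctx) {
      viji.streams.forEach((stream, i) => {
        if (stream.isConnected && stream.currentFrame) {
          ctx.drawImage(stream.currentFrame, i * 170, 0, 160, 90);
        }
      });
    }
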
@@ -2949,9 +3426,7 @@ class VijiWorkerRuntime {
       this.postMessage(type, data);
     });
     this.interactionSystem = new InteractionSystem();
-    this.videoSystem = new VideoSystem();
     Object.assign(this.viji, this.interactionSystem.getInteractionAPIs());
-    Object.assign(this.viji.video, this.videoSystem.getVideoAPI());
     this.viji.audio = {
       ...this.audioState,
       getFrequencyData: () => this.audioState.frequencyData
@@ -3057,16 +3532,38 @@ class VijiWorkerRuntime {
     this.interactionSystem.resetInteractionState();
     this.audioState = {
       isConnected: false,
-      volume: {
+      volume: { current: 0, peak: 0, smoothed: 0 },
       bands: {
-
-        mid: 0,
-        treble: 0,
-        subBass: 0,
+        low: 0,
        lowMid: 0,
+        mid: 0,
        highMid: 0,
-
-
+        high: 0,
+        lowSmoothed: 0,
+        lowMidSmoothed: 0,
+        midSmoothed: 0,
+        highMidSmoothed: 0,
+        highSmoothed: 0
+      },
+      beat: {
+        kick: 0,
+        snare: 0,
+        hat: 0,
+        any: 0,
+        kickSmoothed: 0,
+        snareSmoothed: 0,
+        anySmoothed: 0,
+        triggers: { any: false, kick: false, snare: false, hat: false },
+        bpm: 120,
+        phase: 0,
+        bar: 0,
+        confidence: 0,
+        isLocked: false
+      },
+      spectral: {
+        brightness: 0,
+        flatness: 0,
+        flux: 0
      },
      frequencyData: new Uint8Array(0)
    };
@@ -3074,8 +3571,37 @@ class VijiWorkerRuntime {
       ...this.audioState,
       getFrequencyData: () => this.audioState.frequencyData
     };
-    this.
-
+    this.videoSystems.forEach((vs) => vs.resetVideoState());
+    if (this.videoSystems[0]) {
+      Object.assign(this.viji.video, this.videoSystems[0].getVideoAPI());
+    }
+    this.updateVijiStreams();
+  }
+  /**
+   * Updates viji.streams from videoSystems array
+   * Slices based on whether there's a main stream at index 0
+   */
+  updateVijiStreams() {
+    const startIndex = this.hasMainVideoStream ? 1 : 0;
+    const freshStreams = this.videoSystems.slice(startIndex).map((vs) => {
+      const api = vs?.getVideoAPI() || {
+        isConnected: false,
+        currentFrame: null,
+        frameWidth: 0,
+        frameHeight: 0,
+        frameRate: 0,
+        getFrameData: () => null
+      };
+      return api;
+    });
+    this.viji.streams.length = freshStreams.length;
+    for (let i = 0; i < freshStreams.length; i++) {
+      if (this.viji.streams[i]) {
+        Object.assign(this.viji.streams[i], freshStreams[i]);
+      } else {
+        this.viji.streams[i] = freshStreams[i];
+      }
+    }
   }
   // Send all parameters (from helper functions) to host
   sendAllParametersToHost() {
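Note: updateVijiStreams deliberately mutates this.viji.streams in place (length truncation plus Object.assign into existing entries) rather than swapping in a new array, so a scene that captured a stream object once keeps seeing fresh per-frame data. Sketch of the pattern this protects, with the setup/render split illustrative:

    let cam2;
    function setup(viji) {
      cam2 = viji.streams[1]; // reference captured once
    }
    function render(viji) {
      // still live each frame: the runtime assigned new data into the same object
      if (cam2?.isConnected && cam2.currentFrame) {
        // draw cam2.currentFrame...
      }
    }
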
@@ -3127,6 +3653,21 @@ class VijiWorkerRuntime {
       case "video-config-update":
         this.handleVideoConfigUpdate(message);
         break;
+      case "video-streams-prepare":
+        this.handleVideoStreamsPrepare(message);
+        break;
+      case "video-frame-direct":
+        this.handleVideoFrameDirect(message);
+        break;
+      case "enable-auto-capture":
+        this.autoCaptureEnabled = message.data.enabled;
+        if (message.data.flipY !== void 0) {
+          this.autoCaptureFormat.flipY = message.data.flipY;
+          this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"} with flipY=${message.data.flipY}`);
+        } else {
+          this.debugLog(`[AutoCapture] ${message.data.enabled ? "ENABLED" : "DISABLED"}`);
+        }
+        break;
       case "mouse-update":
         this.handleMouseUpdate(message);
         break;
@@ -3139,6 +3680,9 @@ class VijiWorkerRuntime {
       case "interaction-enabled":
         this.handleInteractionEnabled(message);
         break;
+      case "device-state-update":
+        this.handleDeviceStateUpdate(message);
+        break;
       case "performance-update":
         this.handlePerformanceUpdate(message);
         break;
@@ -3185,8 +3729,8 @@
   handleCVFrameRateUpdate(message) {
     if (message.data && message.data.mode) {
       const sceneProcessingFPS = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
-      if (this.
-      this.
+      if (this.videoSystems[0]) {
+        this.videoSystems[0].handleVideoConfigUpdate({
           cvFrameRate: {
             mode: message.data.mode,
             sceneTargetFPS: sceneProcessingFPS
@@ -3209,7 +3753,12 @@
     const totalTime = this.effectiveFrameTimes[this.effectiveFrameTimes.length - 1] - this.effectiveFrameTimes[0];
     const frameCount = this.effectiveFrameTimes.length - 1;
     const effectiveRefreshRate = Math.round(frameCount / totalTime * 1e3);
-    const cvStats = this.
+    const cvStats = this.videoSystems[0]?.getCVStats() || {
+      activeFeatures: [],
+      processingTime: 0,
+      actualFPS: 0,
+      isProcessing: false
+    };
     this.postMessage("performance-update", {
       effectiveRefreshRate,
       frameRateMode: this.frameRateMode,
@@ -3267,10 +3816,23 @@
     this.debugLog("Stream update:", message.data);
   }
   handleAudioAnalysisUpdate(message) {
+    const events = message.data.beat.events || [];
+    const triggers = {
+      kick: events.some((e) => e.type === "kick"),
+      snare: events.some((e) => e.type === "snare"),
+      hat: events.some((e) => e.type === "hat"),
+      any: events.length > 0
+    };
     this.audioState = {
       isConnected: message.data.isConnected,
       volume: message.data.volume,
       bands: message.data.bands,
+      beat: {
+        ...message.data.beat,
+        triggers
+        // Add derived triggers (reliable!)
+      },
+      spectral: message.data.spectral,
       frequencyData: new Uint8Array(message.data.frequencyData)
     };
     this.viji.audio = {
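Note: beat.events only carries entries on the frames where the host detected an onset, so the derived booleans behave as per-frame edge triggers. Sketch of the intended scene-side use, contrasting one-shot triggers with the continuous energies (the names are the ones added to audioState above):

    let flash = 0;
    function render(viji) {
      if (viji.audio.beat.triggers.kick) flash = 1; // fires on the kick frame only
      flash *= 0.9;                                 // decay between kicks
      const pulse = viji.audio.beat.kickSmoothed;   // continuous energy
      // drive brightness from flash, scale from pulse...
    }
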
@@ -3279,31 +3841,81 @@
     };
   }
   handleVideoCanvasSetup(message) {
-
+    const { streamIndex, isMain } = message.data;
+    const index = streamIndex || 0;
+    const videoSystem = new VideoSystem();
+    videoSystem.setDebugMode(this.debugMode);
+    videoSystem.handleCanvasSetup({
       offscreenCanvas: message.data.offscreenCanvas,
       width: message.data.width,
       height: message.data.height,
       timestamp: message.data.timestamp
     });
-
+    this.videoSystems[index] = videoSystem;
+    this.debugLog(`Video system setup at index ${index}, isMain: ${isMain}`);
+    if (index === 0) {
+      Object.assign(this.viji.video, videoSystem.getVideoAPI());
+    } else {
+      this.updateVijiStreams();
+    }
   }
   handleVideoFrameUpdate(message) {
-
-
-
-
-
+    const index = message.data.streamIndex || 0;
+    if (this.videoSystems[index]) {
+      this.videoSystems[index].handleFrameUpdate({
+        imageBitmap: message.data.imageBitmap,
+        timestamp: message.data.timestamp
+      });
+      if (index === 0) {
+        Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+      }
+    }
   }
   handleVideoConfigUpdate(message) {
-
-
-
-
-
-
-
-
-
+    const index = message.data.streamIndex || 0;
+    if (this.videoSystems[index]) {
+      this.videoSystems[index].handleVideoConfigUpdate({
+        ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
+        ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
+        ...message.data.width && { width: message.data.width },
+        ...message.data.height && { height: message.data.height },
+        ...message.data.disconnect && { disconnect: message.data.disconnect },
+        timestamp: message.data.timestamp
+      });
+      if (index === 0) {
+        Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+      }
+    }
+  }
+  handleVideoStreamsPrepare(message) {
+    const { mainStream, mediaStreamCount, directFrameCount } = message.data;
+    const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
+    this.hasMainVideoStream = mainStream;
+    this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
+    while (this.videoSystems.length < totalStreams) {
+      this.videoSystems.push(new VideoSystem());
+    }
+    const directFrameStartIndex = (mainStream ? 1 : 0) + mediaStreamCount;
+    for (let i = 0; i < directFrameCount; i++) {
+      const index = directFrameStartIndex + i;
+      if (!this.videoSystems[index]) {
+        this.videoSystems[index] = new VideoSystem();
+      }
+      this.videoSystems[index].setDebugMode(this.debugMode);
+      this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+    }
+    this.updateVijiStreams();
+    this.debugLog(`Prepared ${totalStreams} video systems (${directFrameCount} direct frames)`);
+  }
+  handleVideoFrameDirect(message) {
+    const index = message.data.streamIndex || 0;
+    if (!this.videoSystems[index]) {
+      this.debugLog(`[Compositor] Creating new VideoSystem at index ${index} for direct frames`);
+      this.videoSystems[index] = new VideoSystem();
+      this.videoSystems[index].setDebugMode(this.debugMode);
+      this.videoSystems[index].initializeForDirectFrames(this.rendererType);
+    }
+    this.videoSystems[index].handleDirectFrame(message.data);
   }
   handlePerformanceUpdate(message) {
     this.debugLog("Performance update:", message.data);
@@ -3314,7 +3926,7 @@ class VijiWorkerRuntime {
    */
   async handleCaptureFrame(message) {
     this.pendingCaptures.push(message);
-    this.debugLog(`Capture
+    this.debugLog(`Capture queued: ${message.data.format || "blob"} (${this.pendingCaptures.length} pending)`);
   }
   /**
    * Execute a capture frame request immediately after render completes.
@@ -3325,6 +3937,17 @@
     if (!this.canvas) {
       throw new Error("Canvas not initialized");
     }
+    const format = message.data.format || "blob";
+    if (format === "bitmap") {
+      const bitmap = this.canvas.transferToImageBitmap();
+      self.postMessage({
+        type: "capture-frame-result",
+        id: message.id,
+        timestamp: Date.now(),
+        data: { bitmap }
+      }, [bitmap]);
+      return;
+    }
     const mimeType = message.data.type || "image/jpeg";
     const srcWidth = this.canvas.width;
     const srcHeight = this.canvas.height;
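Note: transferToImageBitmap() detaches the canvas's current backing buffer into an ImageBitmap with no readback or re-encode (the canvas is left blank until the next draw), and listing the bitmap in the transfer list moves it across the worker boundary instead of structured-cloning it. A host-side consumer sketch - only the capture-frame-result reply shape appears in this diff, and previewCtx is an assumed 2D context:

    worker.onmessage = (e) => {
      if (e.data.type === "capture-frame-result" && e.data.data.bitmap) {
        previewCtx.drawImage(e.data.data.bitmap, 0, 0);
        e.data.data.bitmap.close(); // release once drawn
      }
    };
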
@@ -3381,18 +4004,12 @@
       if (!tctx) throw new Error("Failed to get 2D context");
       tctx.drawImage(sourceCanvas, sx, sy, sWidth, sHeight, 0, 0, targetWidth, targetHeight);
       const blob = await temp.convertToBlob({ type: mimeType });
-      const arrayBuffer = await blob.arrayBuffer();
       self.postMessage({
         type: "capture-frame-result",
         id: message.id,
         timestamp: Date.now(),
-        data: {
-
-          buffer: arrayBuffer,
-          width: targetWidth,
-          height: targetHeight
-        }
-      }, [arrayBuffer]);
+        data: { blob }
+      });
     } catch (error) {
       this.postMessage("error", {
         id: message.id,
@@ -3416,6 +4033,7 @@
     if (!this.isRunning) return;
     const currentTime = performance.now();
     this.interactionSystem.frameStart();
+    this.updateVijiStreams();
    this.viji.fps = this.frameRateMode === "full" ? this.screenRefreshRate : this.screenRefreshRate / 2;
    let shouldRender = true;
    if (this.frameRateMode === "half") {
@@ -3458,6 +4076,29 @@
         });
       }
     }
+    if (this.autoCaptureEnabled && this.canvas) {
+      try {
+        const ctx = this.canvas.getContext("2d") || this.canvas.getContext("webgl2") || this.canvas.getContext("webgl");
+        if (ctx) {
+          const options = this.autoCaptureFormat.flipY ? { imageOrientation: "flipY" } : {};
+          createImageBitmap(this.canvas, options).then((bitmap) => {
+            self.postMessage({
+              type: "auto-capture-result",
+              timestamp: Date.now(),
+              data: { bitmap, timestamp: performance.now() }
+            }, [bitmap]);
+          }).catch((err) => {
+            console.warn("[AutoCapture] ImageBitmap creation failed:", err);
+          });
+        } else {
+          if (this.debugMode && this.frameCount % 60 === 0) {
+            this.debugLog("[AutoCapture] No context yet, skipping capture");
+          }
+        }
+      } catch (error) {
+        console.warn("[AutoCapture] Failed:", error);
+      }
+    }
   }
   this.reportPerformanceStats(currentTime);
   requestAnimationFrame(() => this.renderFrame());
@@ -3483,6 +4124,11 @@ class VijiWorkerRuntime {
   handleInteractionEnabled(message) {
     this.interactionSystem.setInteractionEnabled(message.data.enabled);
   }
+  handleDeviceStateUpdate(message) {
+    this.deviceState = message.data;
+    this.viji.device = this.deviceState.device;
+    this.viji.devices = this.deviceState.devices;
+  }
 }
 class SceneAnalyzer {
   /**
@@ -3527,8 +4173,10 @@ async function setSceneCode(sceneCode) {
     runtime.sendAllParametersToHost();
   } else {
     const functionBody = sceneCode + '\nif (typeof render === "function") {\n  return render;\n}\nthrow new Error("Scene code must define a render function");';
-    const
-
+    const AsyncFunction = Object.getPrototypeOf(async function() {
+    }).constructor;
+    const sceneFunction = new AsyncFunction("viji", functionBody);
+    renderFunction = await sceneFunction(runtime.viji);
     self.renderFunction = renderFunction;
     runtime.sendAllParametersToHost();
   }
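Note: AsyncFunction is not a global; it has to be recovered from an async function instance exactly as above. Unlike new Function(...), the constructed function's body may use await, which is what lets scene code do asynchronous setup before returning its render function. Minimal standalone illustration:

    const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor;
    const demo = new AsyncFunction("x", "return (await Promise.resolve(x)) * 2;");
    demo(21).then(console.log); // logs 42
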
@@ -3546,4 +4194,4 @@
   }
 }
 self.setSceneCode = setSceneCode;
-//# sourceMappingURL=viji.worker-
+//# sourceMappingURL=viji.worker-Be0jZvYj.js.map