@viji-dev/core 0.3.0 → 0.3.2
- package/dist/artist-dts-p5.js +1 -1
- package/dist/artist-dts.js +1 -1
- package/dist/artist-global.d.ts +3 -1
- package/dist/artist-js-ambient.d.ts +47 -39
- package/dist/artist-jsdoc.d.ts +47 -39
- package/dist/assets/{viji.worker-b3XR7zKX.js → viji.worker-BGYUuaFF.js} +320 -37
- package/dist/assets/viji.worker-BGYUuaFF.js.map +1 -0
- package/dist/{essentia-wasm.web-CO8uMw0d.js → essentia-wasm.web-Ca8e6Ylt.js} +2 -2
- package/dist/{essentia-wasm.web-CO8uMw0d.js.map → essentia-wasm.web-Ca8e6Ylt.js.map} +1 -1
- package/dist/{index-BdLMCFEN.js → index-BTtBhJW4.js} +69 -10
- package/dist/index-BTtBhJW4.js.map +1 -0
- package/dist/index.d.ts +24 -1
- package/dist/index.js +1 -1
- package/dist/shader-uniforms.js +365 -0
- package/package.json +1 -1
- package/dist/assets/viji.worker-b3XR7zKX.js.map +0 -1
- package/dist/index-BdLMCFEN.js.map +0 -1
```diff
--- package/dist/assets/viji.worker-b3XR7zKX.js
+++ package/dist/assets/viji.worker-BGYUuaFF.js
@@ -1243,6 +1243,9 @@ class VideoSystem {
   debugMode = false;
   // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
   directGPUMode = false;
+  // Stream categorization
+  streamType = "additional";
+  deviceId;
   /**
    * Enable or disable debug logging
    */
```
```diff
@@ -1283,6 +1286,25 @@ class VideoSystem {
   constructor() {
     this.cvSystem = new CVSystem();
   }
+  /**
+   * Set stream type and optional device ID
+   */
+  setStreamType(type, deviceId) {
+    this.streamType = type;
+    this.deviceId = deviceId;
+  }
+  /**
+   * Get stream type
+   */
+  getStreamType() {
+    return this.streamType;
+  }
+  /**
+   * Get device ID (for device streams)
+   */
+  getDeviceId() {
+    return this.deviceId;
+  }
   /**
    * Get the video API for inclusion in the viji object
    */
```
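The accessors above are how the runtime tags each `VideoSystem` as `"main"`, `"additional"`, or `"device"` when it is created, and how later code routes it. A minimal sketch of that tagging flow; the stub class is hypothetical and mirrors only the members added in this hunk, not the full `VideoSystem`:

```js
// Hypothetical stand-in mirroring only the new VideoSystem members from this diff;
// the real class also manages the offscreen canvas, CV system and frame updates.
class VideoSystemStub {
  streamType = "additional"; // default, as in the diff
  deviceId;
  setStreamType(type, deviceId) {
    this.streamType = type;
    this.deviceId = deviceId;
  }
  getStreamType() {
    return this.streamType;
  }
  getDeviceId() {
    return this.deviceId;
  }
}

const mainCam = new VideoSystemStub();
mainCam.setStreamType("main");

const phoneCam = new VideoSystemStub();
phoneCam.setStreamType("device", "device-1234"); // hypothetical device id

console.log(mainCam.getStreamType()); // "main"
console.log(phoneCam.getDeviceId());  // "device-1234"
```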
```diff
@@ -2132,6 +2154,10 @@ class ShaderWorkerAdapter {
   }
   static MAX_STREAMS = 8;
   // Maximum number of compositor input streams
+  static MAX_EXTERNAL_DEVICES = 8;
+  // Maximum number of external devices
+  static MAX_DEVICE_VIDEOS = 8;
+  // Maximum number of device video streams
   gl;
   program = null;
   uniformLocations = /* @__PURE__ */ new Map();
```
```diff
@@ -2150,6 +2176,8 @@ class ShaderWorkerAdapter {
   segmentationTexture = null;
   // Multi-stream textures
   streamTextures = [];
+  // Device video textures
+  deviceTextures = new Array(ShaderWorkerAdapter.MAX_DEVICE_VIDEOS).fill(null);
   // Backbuffer support (ping-pong framebuffers)
   backbufferFramebuffer = null;
   backbufferTexture = null;
```
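The `deviceTextures` slot array is sized once from `MAX_DEVICE_VIDEOS` and starts out all `null`; texture handles are created lazily per slot (see `updateDeviceTexture` further down) and torn down uniformly on cleanup. A small sketch of that fixed-slot pattern, assuming only the constant and the lazy-create behaviour shown in this diff:

```js
// Fixed-slot texture pool: slots start as null and are filled on first use.
const MAX_DEVICE_VIDEOS = 8;
const deviceTextures = new Array(MAX_DEVICE_VIDEOS).fill(null);

function ensureDeviceTexture(gl, index) {
  if (index >= MAX_DEVICE_VIDEOS) return null; // out-of-range devices are ignored
  if (!deviceTextures[index]) {
    deviceTextures[index] = gl.createTexture(); // created lazily, reused afterwards
  }
  return deviceTextures[index];
}
```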
```diff
@@ -2400,6 +2428,33 @@ uniform bool u_stream5Connected; // Stream 5 connection status
 uniform bool u_stream6Connected; // Stream 6 connection status
 uniform bool u_stream7Connected; // Stream 7 connection status
 
+// Device Video Support (device cameras)
+uniform int u_deviceCount; // Number of device videos (0-8)
+uniform sampler2D u_device0; // Device 0 camera texture
+uniform sampler2D u_device1; // Device 1 camera texture
+uniform sampler2D u_device2; // Device 2 camera texture
+uniform sampler2D u_device3; // Device 3 camera texture
+uniform sampler2D u_device4; // Device 4 camera texture
+uniform sampler2D u_device5; // Device 5 camera texture
+uniform sampler2D u_device6; // Device 6 camera texture
+uniform sampler2D u_device7; // Device 7 camera texture
+uniform vec2 u_device0Resolution; // Device 0 resolution
+uniform vec2 u_device1Resolution; // Device 1 resolution
+uniform vec2 u_device2Resolution; // Device 2 resolution
+uniform vec2 u_device3Resolution; // Device 3 resolution
+uniform vec2 u_device4Resolution; // Device 4 resolution
+uniform vec2 u_device5Resolution; // Device 5 resolution
+uniform vec2 u_device6Resolution; // Device 6 resolution
+uniform vec2 u_device7Resolution; // Device 7 resolution
+uniform bool u_device0Connected; // Device 0 connection status
+uniform bool u_device1Connected; // Device 1 connection status
+uniform bool u_device2Connected; // Device 2 connection status
+uniform bool u_device3Connected; // Device 3 connection status
+uniform bool u_device4Connected; // Device 4 connection status
+uniform bool u_device5Connected; // Device 5 connection status
+uniform bool u_device6Connected; // Device 6 connection status
+uniform bool u_device7Connected; // Device 7 connection status
+
 // CV - Face Detection
 uniform int u_faceCount; // Number of detected faces (0-1)
 uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
```
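These uniforms expose up to eight device camera feeds to scene shaders, each with a resolution and a connection flag. The snippet below is an illustrative fragment-shader helper, written as a JS template string in the same style the worker uses to assemble its shader source; only the uniform names and types come from this diff, while the sampling helper and fallback logic are examples (use `texture()` instead of `texture2D()` if the compositor targets GLSL ES 3.00):

```js
// Illustrative GLSL helper built on the new device-video uniforms.
const deviceSampleSnippet = /* glsl */ `
vec4 sampleDevice0(vec2 uv, vec4 fallback) {
  // Guard on the connection flag so a disconnected device renders the fallback.
  if (u_device0Connected && u_device0Resolution.x > 0.0) {
    return texture2D(u_device0, uv);
  }
  return fallback;
}
`;
```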
```diff
@@ -2431,6 +2486,58 @@ uniform vec2 u_rightAnklePosition; // Right ankle landmark position in pixels
 uniform sampler2D u_segmentationMask; // Body segmentation mask texture (0=background, 1=person)
 uniform vec2 u_segmentationRes; // Segmentation mask resolution in pixels
 
+// Device Sensors - Internal Device (viji.device)
+uniform vec3 u_deviceAcceleration; // Acceleration without gravity (m/s²) - x, y, z
+uniform vec3 u_deviceAccelerationGravity; // Acceleration with gravity (m/s²) - x, y, z
+uniform vec3 u_deviceRotationRate; // Rotation rate (deg/s) - alpha, beta, gamma
+uniform vec3 u_deviceOrientation; // Device orientation (degrees) - alpha, beta, gamma
+uniform bool u_deviceOrientationAbsolute; // True if orientation uses magnetometer (compass)
+uniform vec2 u_deviceLocation; // Geolocation (degrees) - latitude, longitude
+uniform vec4 u_deviceLocationExt; // Extended geolocation - altitude(m), accuracy(m), heading(deg), speed(m/s)
+
+// Device Sensors - External Devices (viji.devices[0-7])
+uniform int u_externalDeviceCount; // Number of connected external devices (0-8)
+uniform vec3 u_device0Acceleration; // Device 0 acceleration without gravity (m/s²)
+uniform vec3 u_device0AccelerationGravity; // Device 0 acceleration with gravity (m/s²)
+uniform vec3 u_device0RotationRate; // Device 0 rotation rate (deg/s)
+uniform vec3 u_device0Orientation; // Device 0 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device0Location; // Device 0 location - latitude, longitude
+uniform vec3 u_device1Acceleration; // Device 1 acceleration without gravity (m/s²)
+uniform vec3 u_device1AccelerationGravity; // Device 1 acceleration with gravity (m/s²)
+uniform vec3 u_device1RotationRate; // Device 1 rotation rate (deg/s)
+uniform vec3 u_device1Orientation; // Device 1 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device1Location; // Device 1 location - latitude, longitude
+uniform vec3 u_device2Acceleration; // Device 2 acceleration without gravity (m/s²)
+uniform vec3 u_device2AccelerationGravity; // Device 2 acceleration with gravity (m/s²)
+uniform vec3 u_device2RotationRate; // Device 2 rotation rate (deg/s)
+uniform vec3 u_device2Orientation; // Device 2 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device2Location; // Device 2 location - latitude, longitude
+uniform vec3 u_device3Acceleration; // Device 3 acceleration without gravity (m/s²)
+uniform vec3 u_device3AccelerationGravity; // Device 3 acceleration with gravity (m/s²)
+uniform vec3 u_device3RotationRate; // Device 3 rotation rate (deg/s)
+uniform vec3 u_device3Orientation; // Device 3 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device3Location; // Device 3 location - latitude, longitude
+uniform vec3 u_device4Acceleration; // Device 4 acceleration without gravity (m/s²)
+uniform vec3 u_device4AccelerationGravity; // Device 4 acceleration with gravity (m/s²)
+uniform vec3 u_device4RotationRate; // Device 4 rotation rate (deg/s)
+uniform vec3 u_device4Orientation; // Device 4 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device4Location; // Device 4 location - latitude, longitude
+uniform vec3 u_device5Acceleration; // Device 5 acceleration without gravity (m/s²)
+uniform vec3 u_device5AccelerationGravity; // Device 5 acceleration with gravity (m/s²)
+uniform vec3 u_device5RotationRate; // Device 5 rotation rate (deg/s)
+uniform vec3 u_device5Orientation; // Device 5 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device5Location; // Device 5 location - latitude, longitude
+uniform vec3 u_device6Acceleration; // Device 6 acceleration without gravity (m/s²)
+uniform vec3 u_device6AccelerationGravity; // Device 6 acceleration with gravity (m/s²)
+uniform vec3 u_device6RotationRate; // Device 6 rotation rate (deg/s)
+uniform vec3 u_device6Orientation; // Device 6 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device6Location; // Device 6 location - latitude, longitude
+uniform vec3 u_device7Acceleration; // Device 7 acceleration without gravity (m/s²)
+uniform vec3 u_device7AccelerationGravity; // Device 7 acceleration with gravity (m/s²)
+uniform vec3 u_device7RotationRate; // Device 7 rotation rate (deg/s)
+uniform vec3 u_device7Orientation; // Device 7 orientation (degrees) - alpha, beta, gamma
+uniform vec2 u_device7Location; // Device 7 location - latitude, longitude
+
 // Backbuffer (previous frame feedback)
 ${this.backbufferEnabled ? "uniform sampler2D backbuffer; // Previous frame texture for feedback effects" : "// backbuffer not enabled"}
 `;
```
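The sensor uniforms mirror `viji.device` and `viji.devices[0-7]`: motion in m/s², rotation rate and orientation in degrees, location as latitude/longitude. Below is a hedged example of shader math driven by them, again written as a JS string; the visual mapping is invented for illustration, and the statements are meant to sit inside `main()` where a `uv` coordinate is already defined:

```js
// Example-only shader math using the new sensor uniforms (paste inside main()).
const sensorDrivenSnippet = /* glsl */ `
float shake = length(u_deviceAcceleration) / 9.81;  // ~0 at rest, grows as the device moves
float tilt = radians(u_deviceOrientation.z);        // gamma angle, converted to radians
vec2 rotatedUv = mat2(cos(tilt), -sin(tilt),
                      sin(tilt),  cos(tilt)) * (uv - 0.5) + 0.5;
`;
```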
```diff
@@ -2672,6 +2779,25 @@ ${error}`);
         this.setUniform(connectedUniform, "bool", false);
       }
     }
+    const devices = viji.devices || [];
+    const deviceCount = Math.min(devices.length, ShaderWorkerAdapter.MAX_DEVICE_VIDEOS);
+    this.setUniform("u_deviceCount", "int", deviceCount);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_DEVICE_VIDEOS; i++) {
+      const connectedUniform = `u_device${i}Connected`;
+      const resolutionUniform = `u_device${i}Resolution`;
+      if (i < deviceCount && devices[i]?.video?.isConnected && devices[i].video.currentFrame) {
+        this.updateDeviceTexture(i, devices[i].video.currentFrame);
+        this.setUniform(
+          resolutionUniform,
+          "vec2",
+          [devices[i].video.frameWidth, devices[i].video.frameHeight]
+        );
+        this.setUniform(connectedUniform, "bool", true);
+      } else {
+        this.setUniform(resolutionUniform, "vec2", [0, 0]);
+        this.setUniform(connectedUniform, "bool", false);
+      }
+    }
     const faces = video.faces || [];
     this.setUniform("u_faceCount", "int", faces.length);
     if (faces.length > 0) {
```
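The upload loop above expects each `viji.devices[i]` entry to carry a `video` object with `isConnected`, `currentFrame`, `frameWidth` and `frameHeight` (those property names are read directly in this hunk). A sketch of that expected shape; the values and the `id` field are placeholders:

```js
// Shape the loop above reads from each viji.devices[i]; values are placeholders.
const exampleDeviceEntry = {
  id: "device-1234", // hypothetical identifier
  video: {
    isConnected: true,
    currentFrame: null, // ImageBitmap or OffscreenCanvas once the host delivers a frame
    frameWidth: 1280,
    frameHeight: 720,
  },
};
```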
```diff
@@ -2741,6 +2867,134 @@ ${error}`);
     } else {
       this.setUniform("u_segmentationRes", "vec2", [0, 0]);
     }
+    const internalDevice = viji.device;
+    if (internalDevice) {
+      const motion = internalDevice.motion;
+      if (motion?.acceleration) {
+        this.setUniform("u_deviceAcceleration", "vec3", [
+          motion.acceleration.x ?? 0,
+          motion.acceleration.y ?? 0,
+          motion.acceleration.z ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
+      }
+      if (motion?.accelerationIncludingGravity) {
+        this.setUniform("u_deviceAccelerationGravity", "vec3", [
+          motion.accelerationIncludingGravity.x ?? 0,
+          motion.accelerationIncludingGravity.y ?? 0,
+          motion.accelerationIncludingGravity.z ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
+      }
+      if (motion?.rotationRate) {
+        this.setUniform("u_deviceRotationRate", "vec3", [
+          motion.rotationRate.alpha ?? 0,
+          motion.rotationRate.beta ?? 0,
+          motion.rotationRate.gamma ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
+      }
+      const orientation = internalDevice.orientation;
+      if (orientation) {
+        this.setUniform("u_deviceOrientation", "vec3", [
+          orientation.alpha ?? 0,
+          orientation.beta ?? 0,
+          orientation.gamma ?? 0
+        ]);
+        this.setUniform("u_deviceOrientationAbsolute", "bool", orientation.absolute);
+      } else {
+        this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
+        this.setUniform("u_deviceOrientationAbsolute", "bool", false);
+      }
+      const geolocation = internalDevice.geolocation;
+      if (geolocation) {
+        this.setUniform("u_deviceLocation", "vec2", [
+          geolocation.latitude ?? 0,
+          geolocation.longitude ?? 0
+        ]);
+        this.setUniform("u_deviceLocationExt", "vec4", [
+          geolocation.altitude ?? 0,
+          geolocation.accuracy ?? 0,
+          geolocation.heading ?? 0,
+          geolocation.speed ?? 0
+        ]);
+      } else {
+        this.setUniform("u_deviceLocation", "vec2", [0, 0]);
+        this.setUniform("u_deviceLocationExt", "vec4", [0, 0, 0, 0]);
+      }
+    } else {
+      this.setUniform("u_deviceAcceleration", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceAccelerationGravity", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceRotationRate", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceOrientation", "vec3", [0, 0, 0]);
+      this.setUniform("u_deviceOrientationAbsolute", "bool", false);
+      this.setUniform("u_deviceLocation", "vec2", [0, 0]);
+      this.setUniform("u_deviceLocationExt", "vec4", [0, 0, 0, 0]);
+    }
+    const externalDevices = viji.devices || [];
+    const externalDeviceCount = Math.min(externalDevices.length, ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES);
+    this.setUniform("u_externalDeviceCount", "int", externalDeviceCount);
+    for (let i = 0; i < ShaderWorkerAdapter.MAX_EXTERNAL_DEVICES; i++) {
+      if (i < externalDeviceCount) {
+        const device = externalDevices[i];
+        const motion = device.motion;
+        if (motion?.acceleration) {
+          this.setUniform(`u_device${i}Acceleration`, "vec3", [
+            motion.acceleration.x ?? 0,
+            motion.acceleration.y ?? 0,
+            motion.acceleration.z ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
+        }
+        if (motion?.accelerationIncludingGravity) {
+          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [
+            motion.accelerationIncludingGravity.x ?? 0,
+            motion.accelerationIncludingGravity.y ?? 0,
+            motion.accelerationIncludingGravity.z ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
+        }
+        if (motion?.rotationRate) {
+          this.setUniform(`u_device${i}RotationRate`, "vec3", [
+            motion.rotationRate.alpha ?? 0,
+            motion.rotationRate.beta ?? 0,
+            motion.rotationRate.gamma ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
+        }
+        const orientation = device.orientation;
+        if (orientation) {
+          this.setUniform(`u_device${i}Orientation`, "vec3", [
+            orientation.alpha ?? 0,
+            orientation.beta ?? 0,
+            orientation.gamma ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
+        }
+        const geolocation = device.geolocation;
+        if (geolocation) {
+          this.setUniform(`u_device${i}Location`, "vec2", [
+            geolocation.latitude ?? 0,
+            geolocation.longitude ?? 0
+          ]);
+        } else {
+          this.setUniform(`u_device${i}Location`, "vec2", [0, 0]);
+        }
+      } else {
+        this.setUniform(`u_device${i}Acceleration`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}AccelerationGravity`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}RotationRate`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}Orientation`, "vec3", [0, 0, 0]);
+        this.setUniform(`u_device${i}Location`, "vec2", [0, 0]);
+      }
+    }
   }
   /**
    * Update parameter uniforms from parameter objects
```
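Every sensor channel falls back to `0` when a value is missing (`?? 0`), and entire uniform groups are zeroed when the device, reading, or slot is absent, so shaders never see stale or undefined data. A compact restatement of that defaulting rule as a hypothetical helper (the package inlines the logic rather than using a function like this):

```js
// Hypothetical helper expressing the "?? 0 per channel, zeros when absent" rule used above.
function toVec3(reading, keys = ["x", "y", "z"]) {
  if (!reading) return [0, 0, 0];
  return keys.map((key) => reading[key] ?? 0);
}

toVec3({ x: 0.2, z: -9.7 });                                          // [0.2, 0, -9.7]
toVec3(null);                                                         // [0, 0, 0]
toVec3({ alpha: 10, beta: 5, gamma: 0 }, ["alpha", "beta", "gamma"]); // [10, 5, 0]
```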
```diff
@@ -2936,6 +3190,42 @@ ${error}`);
       streamFrame.close();
     }
   }
+  /**
+   * Update device video texture
+   */
+  updateDeviceTexture(index, frame) {
+    if (!this.gl || index >= ShaderWorkerAdapter.MAX_DEVICE_VIDEOS) return;
+    if (!this.deviceTextures[index]) {
+      this.deviceTextures[index] = this.gl.createTexture();
+    }
+    const texture = this.deviceTextures[index];
+    if (!texture) return;
+    const textureUnit = 8 + index;
+    this.gl.activeTexture(this.gl.TEXTURE0 + textureUnit);
+    this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
+    const shouldFlip = frame instanceof OffscreenCanvas;
+    if (shouldFlip) {
+      this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
+    this.gl.texImage2D(
+      this.gl.TEXTURE_2D,
+      0,
+      this.gl.RGBA,
+      this.gl.RGBA,
+      this.gl.UNSIGNED_BYTE,
+      frame
+    );
+    if (shouldFlip) {
+      this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, false);
+    }
+    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
+    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
+    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
+    this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
+    if (frame instanceof ImageBitmap) {
+      frame.close();
+    }
+  }
   /**
    * Update segmentation mask texture
    */
```
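`updateDeviceTexture` uploads each device frame on texture unit `8 + index`, keeping units 0-7 for the existing stream textures, flips OffscreenCanvas sources, and closes ImageBitmaps after upload. The matching sampler uniform still has to point at that unit; the diff routes this through its own `setUniform` machinery, so the raw-WebGL version below is only an assumption about what that amounts to:

```js
// Assumed equivalent of pointing u_device<i> at the unit used by updateDeviceTexture.
function bindDeviceSampler(gl, program, index) {
  const location = gl.getUniformLocation(program, `u_device${index}`);
  if (location !== null) {
    gl.uniform1i(location, 8 + index); // sampler reads from TEXTURE8 + index
  }
}
```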
```diff
@@ -3035,6 +3325,10 @@ ${error}`);
       if (texture) gl.deleteTexture(texture);
     }
     this.streamTextures = [];
+    for (const texture of this.deviceTextures) {
+      if (texture) gl.deleteTexture(texture);
+    }
+    this.deviceTextures = [];
     for (const texture of this.textures.values()) {
       if (texture) gl.deleteTexture(texture);
     }
```
```diff
@@ -3099,8 +3393,6 @@ class VijiWorkerRuntime {
   interactionSystem;
   // Video systems (multi-stream) - index 0 = main with CV, 1+ = additional without CV
   videoSystems = [];
-  hasMainVideoStream = false;
-  // Track if videoSystems[0] is main stream
   // Auto-capture for frame sources
   autoCaptureEnabled = false;
   autoCaptureFormat = { flipY: true };
```
```diff
@@ -3397,28 +3689,13 @@ class VijiWorkerRuntime {
   }
   /**
    * Updates viji.streams from videoSystems array
-   *
+   * Filters only 'additional' type streams (excludes main and device streams)
    */
   updateVijiStreams() {
-    const
-    const freshStreams = this.videoSystems.slice(startIndex).map((vs) => {
-      const api = vs?.getVideoAPI() || {
-        isConnected: false,
-        currentFrame: null,
-        frameWidth: 0,
-        frameHeight: 0,
-        frameRate: 0,
-        getFrameData: () => null
-      };
-      return api;
-    });
+    const freshStreams = this.videoSystems.filter((vs) => vs && vs.getStreamType() === "additional").map((vs) => vs.getVideoAPI());
     this.viji.streams.length = freshStreams.length;
     for (let i = 0; i < freshStreams.length; i++) {
-
-        Object.assign(this.viji.streams[i], freshStreams[i]);
-      } else {
-        this.viji.streams[i] = freshStreams[i];
-      }
+      this.viji.streams[i] = freshStreams[i];
     }
   }
   // Send all parameters (from helper functions) to host
```
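After this change `viji.streams` is rebuilt from only the `"additional"` systems, so the main stream and device streams no longer occupy stream slots. A small illustration of that filtering with stand-in objects:

```js
// Stand-in objects only; the real entries are full VideoSystem instances.
const videoSystems = [
  { getStreamType: () => "main",       getVideoAPI: () => ({ label: "main camera" }) },
  { getStreamType: () => "additional", getVideoAPI: () => ({ label: "stream A" }) },
  { getStreamType: () => "device",     getVideoAPI: () => ({ label: "phone camera" }) },
  { getStreamType: () => "additional", getVideoAPI: () => ({ label: "stream B" }) },
];

const streams = videoSystems
  .filter((vs) => vs && vs.getStreamType() === "additional")
  .map((vs) => vs.getVideoAPI());

console.log(streams.map((s) => s.label)); // ["stream A", "stream B"] - main/device excluded
```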
```diff
@@ -3659,10 +3936,11 @@ class VijiWorkerRuntime {
     };
   }
   handleVideoCanvasSetup(message) {
-    const { streamIndex,
+    const { streamIndex, streamType, deviceId } = message.data;
     const index = streamIndex || 0;
     const videoSystem = new VideoSystem();
     videoSystem.setDebugMode(this.debugMode);
+    videoSystem.setStreamType(streamType, deviceId);
     videoSystem.handleCanvasSetup({
       offscreenCanvas: message.data.offscreenCanvas,
       width: message.data.width,
```
```diff
@@ -3670,11 +3948,14 @@ class VijiWorkerRuntime {
       timestamp: message.data.timestamp
     });
     this.videoSystems[index] = videoSystem;
-    this.debugLog(`Video system setup at index ${index},
-
-
-
-
+    this.debugLog(`Video system setup at index ${index}, type: ${streamType}${deviceId ? `, deviceId: ${deviceId}` : ""}`);
+    switch (streamType) {
+      case "main":
+        Object.assign(this.viji.video, videoSystem.getVideoAPI());
+        break;
+      case "additional":
+        this.updateVijiStreams();
+        break;
     }
   }
   handleVideoFrameUpdate(message) {
```
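The setup message now carries `streamType` and an optional `deviceId` alongside the canvas payload. A hedged host-side sketch of posting such a message; the data field names match what `handleVideoCanvasSetup` destructures, but the message `type` label and how the host obtains the worker are assumptions, not part of this diff:

```js
// Hypothetical host-side setup call; only the data fields are taken from the diff.
function setupDeviceStream(worker, streamIndex, deviceId) {
  const offscreenCanvas = new OffscreenCanvas(1280, 720);
  worker.postMessage(
    {
      type: "video-canvas-setup", // assumed message type label
      data: {
        streamIndex,
        streamType: "device",
        deviceId,
        offscreenCanvas,
        width: 1280,
        height: 720,
        timestamp: Date.now(),
      },
    },
    [offscreenCanvas] // OffscreenCanvas must be transferred, not cloned
  );
}
```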
```diff
@@ -3708,7 +3989,6 @@ class VijiWorkerRuntime {
   handleVideoStreamsPrepare(message) {
     const { mainStream, mediaStreamCount, directFrameCount } = message.data;
     const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
-    this.hasMainVideoStream = mainStream;
     this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
     while (this.videoSystems.length < totalStreams) {
       this.videoSystems.push(new VideoSystem());
```
```diff
@@ -3822,18 +4102,12 @@ class VijiWorkerRuntime {
       if (!tctx) throw new Error("Failed to get 2D context");
       tctx.drawImage(sourceCanvas, sx, sy, sWidth, sHeight, 0, 0, targetWidth, targetHeight);
       const blob = await temp.convertToBlob({ type: mimeType });
-      const arrayBuffer = await blob.arrayBuffer();
       self.postMessage({
         type: "capture-frame-result",
         id: message.id,
         timestamp: Date.now(),
-        data: {
-
-          buffer: arrayBuffer,
-          width: targetWidth,
-          height: targetHeight
-        }
-      }, [arrayBuffer]);
+        data: { blob }
+      });
     } catch (error) {
       this.postMessage("error", {
         id: message.id,
```
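The capture path now posts the `Blob` from `convertToBlob` directly instead of copying it into a transferred `ArrayBuffer`; Blobs are structured-cloneable, so no transfer list is needed and the separate width/height fields go away. A sketch of consuming the new payload on the host; the handler shape is assumed, only `type: "capture-frame-result"` and `data.blob` come from the diff:

```js
// Assumed host-side consumer of the new capture-frame-result payload.
function listenForCaptures(worker) {
  worker.addEventListener("message", async (event) => {
    if (event.data?.type !== "capture-frame-result") return;
    const { blob } = event.data.data;
    const bitmap = await createImageBitmap(blob); // or URL.createObjectURL(blob) for an <img>
    console.log(`captured frame ${bitmap.width}x${bitmap.height}`);
  });
}
```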
```diff
@@ -3951,7 +4225,16 @@ class VijiWorkerRuntime {
   handleDeviceStateUpdate(message) {
     this.deviceState = message.data;
     this.viji.device = this.deviceState.device;
-
+    const devicesWithVideo = this.deviceState.devices.map((device) => {
+      const videoSystem = this.videoSystems.find(
+        (vs) => vs && vs.getStreamType() === "device" && vs.getDeviceId() === device.id
+      );
+      return {
+        ...device,
+        video: videoSystem ? videoSystem.getVideoAPI() : null
+      };
+    });
+    this.viji.devices = devicesWithVideo;
   }
 }
 class SceneAnalyzer {
```
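`handleDeviceStateUpdate` now joins each external device with its matching `"device"`-typed `VideoSystem`, so `viji.devices[i].video` is either that system's video API or `null`. An illustrative entry shape as a scene would see it; the sensor field names follow the uniform mapping earlier in this diff, and all values are placeholders:

```js
// Illustrative viji.devices[i] entry after the update; values are placeholders.
const exampleDevice = {
  id: "device-1234",
  motion: {
    acceleration: { x: 0.1, y: 0.0, z: -0.2 },
    accelerationIncludingGravity: { x: 0.1, y: 9.8, z: -0.2 },
    rotationRate: { alpha: 0, beta: 1.5, gamma: -0.4 },
  },
  orientation: { alpha: 180, beta: 10, gamma: -5 },
  geolocation: { latitude: 52.52, longitude: 13.405 },
  video: null, // or the matched VideoSystem API: { isConnected, currentFrame, frameWidth, frameHeight, ... }
};
```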
```diff
@@ -4018,4 +4301,4 @@ async function setSceneCode(sceneCode) {
   }
 }
 self.setSceneCode = setSceneCode;
-//# sourceMappingURL=viji.worker-
+//# sourceMappingURL=viji.worker-BGYUuaFF.js.map
```