@viji-dev/core 0.3.1 → 0.3.3

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1243,6 +1243,9 @@ class VideoSystem {
  debugMode = false;
  // GPU-only mode flag (for shader compositor only - P5/Canvas need OffscreenCanvas)
  directGPUMode = false;
+ // Stream categorization
+ streamType = "additional";
+ deviceId;
  /**
  * Enable or disable debug logging
  */
@@ -1283,6 +1286,25 @@ class VideoSystem {
  constructor() {
  this.cvSystem = new CVSystem();
  }
+ /**
+ * Set stream type and optional device ID
+ */
+ setStreamType(type, deviceId) {
+ this.streamType = type;
+ this.deviceId = deviceId;
+ }
+ /**
+ * Get stream type
+ */
+ getStreamType() {
+ return this.streamType;
+ }
+ /**
+ * Get device ID (for device streams)
+ */
+ getDeviceId() {
+ return this.deviceId;
+ }
  /**
  * Get the video API for inclusion in the viji object
  */
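
The new tagging methods compose as follows — a minimal sketch assuming the classes above are in scope; the wiring and the device ID value are hypothetical, only setStreamType/getStreamType/getDeviceId and the "additional" default are confirmed by this diff:

```js
// Hypothetical wiring; only the tagging API is confirmed by this diff.
const mainVideo = new VideoSystem();
mainVideo.setStreamType("main"); // deviceId omitted for non-device streams

const phoneCamera = new VideoSystem();
phoneCamera.setStreamType("device", "phone-123"); // hypothetical device ID

phoneCamera.getStreamType(); // "device"
phoneCamera.getDeviceId();   // "phone-123"
// A VideoSystem that is never tagged keeps the default type "additional".
```
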
@@ -2134,6 +2156,8 @@ class ShaderWorkerAdapter {
  // Maximum number of compositor input streams
  static MAX_EXTERNAL_DEVICES = 8;
  // Maximum number of external devices
+ static MAX_DEVICE_VIDEOS = 8;
+ // Maximum number of device video streams
  gl;
  program = null;
  uniformLocations = /* @__PURE__ */ new Map();
@@ -2152,6 +2176,8 @@ class ShaderWorkerAdapter {
  segmentationTexture = null;
  // Multi-stream textures
  streamTextures = [];
+ // Device video textures
+ deviceTextures = new Array(ShaderWorkerAdapter.MAX_DEVICE_VIDEOS).fill(null);
  // Backbuffer support (ping-pong framebuffers)
  backbufferFramebuffer = null;
  backbufferTexture = null;
@@ -2402,6 +2428,33 @@ uniform bool u_stream5Connected; // Stream 5 connection status
  uniform bool u_stream6Connected; // Stream 6 connection status
  uniform bool u_stream7Connected; // Stream 7 connection status

+ // Device Video Support (device cameras)
+ uniform int u_deviceCount; // Number of device videos (0-8)
+ uniform sampler2D u_device0; // Device 0 camera texture
+ uniform sampler2D u_device1; // Device 1 camera texture
+ uniform sampler2D u_device2; // Device 2 camera texture
+ uniform sampler2D u_device3; // Device 3 camera texture
+ uniform sampler2D u_device4; // Device 4 camera texture
+ uniform sampler2D u_device5; // Device 5 camera texture
+ uniform sampler2D u_device6; // Device 6 camera texture
+ uniform sampler2D u_device7; // Device 7 camera texture
+ uniform vec2 u_device0Resolution; // Device 0 resolution
+ uniform vec2 u_device1Resolution; // Device 1 resolution
+ uniform vec2 u_device2Resolution; // Device 2 resolution
+ uniform vec2 u_device3Resolution; // Device 3 resolution
+ uniform vec2 u_device4Resolution; // Device 4 resolution
+ uniform vec2 u_device5Resolution; // Device 5 resolution
+ uniform vec2 u_device6Resolution; // Device 6 resolution
+ uniform vec2 u_device7Resolution; // Device 7 resolution
+ uniform bool u_device0Connected; // Device 0 connection status
+ uniform bool u_device1Connected; // Device 1 connection status
+ uniform bool u_device2Connected; // Device 2 connection status
+ uniform bool u_device3Connected; // Device 3 connection status
+ uniform bool u_device4Connected; // Device 4 connection status
+ uniform bool u_device5Connected; // Device 5 connection status
+ uniform bool u_device6Connected; // Device 6 connection status
+ uniform bool u_device7Connected; // Device 7 connection status
+
  // CV - Face Detection
  uniform int u_faceCount; // Number of detected faces (0-1)
  uniform vec4 u_face0Bounds; // First face bounding box (x, y, width, height)
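
Scene shaders can read these new uniforms directly. A sketch of the shader-side usage, shown as the kind of fragment source the compositor compiles — `v_uv`, the GLSL ES 1.00 dialect, and the fallback color are assumptions, not confirmed by this diff; only the `u_device*` uniform names are:

```js
// Hypothetical user shader source; the u_device* uniforms are the ones
// declared above. v_uv and gl_FragColor usage are assumptions.
const fragmentSource = `
  void main() {
    vec4 color = vec4(0.0); // black until the first device camera connects
    if (u_deviceCount > 0 && u_device0Connected) {
      color = texture2D(u_device0, v_uv);
    }
    gl_FragColor = color;
  }
`;
```
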
@@ -2726,6 +2779,25 @@ ${error}`);
  this.setUniform(connectedUniform, "bool", false);
  }
  }
+ const devices = viji.devices || [];
+ const deviceCount = Math.min(devices.length, ShaderWorkerAdapter.MAX_DEVICE_VIDEOS);
+ this.setUniform("u_deviceCount", "int", deviceCount);
+ for (let i = 0; i < ShaderWorkerAdapter.MAX_DEVICE_VIDEOS; i++) {
+ const connectedUniform = `u_device${i}Connected`;
+ const resolutionUniform = `u_device${i}Resolution`;
+ if (i < deviceCount && devices[i]?.video?.isConnected && devices[i].video.currentFrame) {
+ this.updateDeviceTexture(i, devices[i].video.currentFrame);
+ this.setUniform(
+ resolutionUniform,
+ "vec2",
+ [devices[i].video.frameWidth, devices[i].video.frameHeight]
+ );
+ this.setUniform(connectedUniform, "bool", true);
+ } else {
+ this.setUniform(resolutionUniform, "vec2", [0, 0]);
+ this.setUniform(connectedUniform, "bool", false);
+ }
+ }
  const faces = video.faces || [];
  this.setUniform("u_faceCount", "int", faces.length);
  if (faces.length > 0) {
@@ -3118,6 +3190,42 @@ ${error}`);
  streamFrame.close();
  }
  }
+ /**
+ * Update device video texture
+ */
+ updateDeviceTexture(index, frame) {
+ if (!this.gl || index >= ShaderWorkerAdapter.MAX_DEVICE_VIDEOS) return;
+ if (!this.deviceTextures[index]) {
+ this.deviceTextures[index] = this.gl.createTexture();
+ }
+ const texture = this.deviceTextures[index];
+ if (!texture) return;
+ const textureUnit = 8 + index;
+ this.gl.activeTexture(this.gl.TEXTURE0 + textureUnit);
+ this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
+ const shouldFlip = frame instanceof OffscreenCanvas;
+ if (shouldFlip) {
+ this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, true);
+ }
+ this.gl.texImage2D(
+ this.gl.TEXTURE_2D,
+ 0,
+ this.gl.RGBA,
+ this.gl.RGBA,
+ this.gl.UNSIGNED_BYTE,
+ frame
+ );
+ if (shouldFlip) {
+ this.gl.pixelStorei(this.gl.UNPACK_FLIP_Y_WEBGL, false);
+ }
+ this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
+ this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
+ this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
+ this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
+ if (frame instanceof ImageBitmap) {
+ frame.close();
+ }
+ }
  /**
  * Update segmentation mask texture
  */
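
Note the fixed offset: device textures occupy texture units 8 and up, leaving units 0-7 for the existing compositor stream textures (consistent with MAX_EXTERNAL_DEVICES = 8). A sketch of how the matching sampler uniforms would be pointed at those units, using standard WebGL calls — the loop below is illustrative and not a function from this diff:

```js
// Illustrative only: bind each u_device<i> sampler to texture unit 8 + i,
// mirroring the `8 + index` offset used in updateDeviceTexture above.
for (let i = 0; i < ShaderWorkerAdapter.MAX_DEVICE_VIDEOS; i++) {
  const location = gl.getUniformLocation(program, `u_device${i}`);
  if (location) gl.uniform1i(location, 8 + i);
}
```
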
@@ -3217,6 +3325,10 @@ ${error}`);
  if (texture) gl.deleteTexture(texture);
  }
  this.streamTextures = [];
+ for (const texture of this.deviceTextures) {
+ if (texture) gl.deleteTexture(texture);
+ }
+ this.deviceTextures = [];
  for (const texture of this.textures.values()) {
  if (texture) gl.deleteTexture(texture);
  }
@@ -3281,8 +3393,6 @@ class VijiWorkerRuntime {
  interactionSystem;
  // Video systems (multi-stream) - index 0 = main with CV, 1+ = additional without CV
  videoSystems = [];
- hasMainVideoStream = false;
- // Track if videoSystems[0] is main stream
  // Auto-capture for frame sources
  autoCaptureEnabled = false;
  autoCaptureFormat = { flipY: true };
@@ -3579,28 +3689,13 @@ class VijiWorkerRuntime {
  }
  /**
  * Updates viji.streams from videoSystems array
- * Slices based on whether there's a main stream at index 0
+ * Filters only 'additional' type streams (excludes main and device streams)
  */
  updateVijiStreams() {
- const startIndex = this.hasMainVideoStream ? 1 : 0;
- const freshStreams = this.videoSystems.slice(startIndex).map((vs) => {
- const api = vs?.getVideoAPI() || {
- isConnected: false,
- currentFrame: null,
- frameWidth: 0,
- frameHeight: 0,
- frameRate: 0,
- getFrameData: () => null
- };
- return api;
- });
+ const freshStreams = this.videoSystems.filter((vs) => vs && vs.getStreamType() === "additional").map((vs) => vs.getVideoAPI());
  this.viji.streams.length = freshStreams.length;
  for (let i = 0; i < freshStreams.length; i++) {
- if (this.viji.streams[i]) {
- Object.assign(this.viji.streams[i], freshStreams[i]);
- } else {
- this.viji.streams[i] = freshStreams[i];
- }
+ this.viji.streams[i] = freshStreams[i];
  }
  }
  // Send all parameters (from helper functions) to host
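
The behavioral change here is that viji.streams is now selected by stream type rather than by array position. A minimal sketch of the new semantics — the example contents of videoSystems are hypothetical:

```js
// Hypothetical mix in videoSystems:
//   [main camera, additional stream, device stream]
// Before: streams = everything after index 0 (positional slice).
// After:  streams = only systems tagged "additional"; the device stream
//         is excluded here and surfaces via viji.devices instead.
const streams = videoSystems
  .filter((vs) => vs && vs.getStreamType() === "additional")
  .map((vs) => vs.getVideoAPI());
```
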
@@ -3841,10 +3936,11 @@ class VijiWorkerRuntime {
  };
  }
  handleVideoCanvasSetup(message) {
- const { streamIndex, isMain } = message.data;
+ const { streamIndex, streamType, deviceId } = message.data;
  const index = streamIndex || 0;
  const videoSystem = new VideoSystem();
  videoSystem.setDebugMode(this.debugMode);
+ videoSystem.setStreamType(streamType, deviceId);
  videoSystem.handleCanvasSetup({
  offscreenCanvas: message.data.offscreenCanvas,
  width: message.data.width,
@@ -3852,29 +3948,34 @@ class VijiWorkerRuntime {
  timestamp: message.data.timestamp
  });
  this.videoSystems[index] = videoSystem;
- this.debugLog(`Video system setup at index ${index}, isMain: ${isMain}`);
- if (index === 0) {
- Object.assign(this.viji.video, videoSystem.getVideoAPI());
- } else {
- this.updateVijiStreams();
+ this.debugLog(`Video system setup at index ${index}, type: ${streamType}${deviceId ? `, deviceId: ${deviceId}` : ""}`);
+ switch (streamType) {
+ case "main":
+ Object.assign(this.viji.video, videoSystem.getVideoAPI());
+ break;
+ case "additional":
+ this.updateVijiStreams();
+ break;
  }
  }
  handleVideoFrameUpdate(message) {
  const index = message.data.streamIndex || 0;
- if (this.videoSystems[index]) {
- this.videoSystems[index].handleFrameUpdate({
+ const vs = this.videoSystems[index];
+ if (vs) {
+ vs.handleFrameUpdate({
  imageBitmap: message.data.imageBitmap,
  timestamp: message.data.timestamp
  });
- if (index === 0) {
- Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+ if (vs.getStreamType() === "main") {
+ Object.assign(this.viji.video, vs.getVideoAPI());
  }
  }
  }
  handleVideoConfigUpdate(message) {
  const index = message.data.streamIndex || 0;
- if (this.videoSystems[index]) {
- this.videoSystems[index].handleVideoConfigUpdate({
+ const vs = this.videoSystems[index];
+ if (vs) {
+ vs.handleVideoConfigUpdate({
  ...message.data.targetFrameRate && { targetFrameRate: message.data.targetFrameRate },
  ...message.data.cvConfig && { cvConfig: message.data.cvConfig },
  ...message.data.width && { width: message.data.width },
@@ -3882,15 +3983,14 @@ class VijiWorkerRuntime {
  ...message.data.disconnect && { disconnect: message.data.disconnect },
  timestamp: message.data.timestamp
  });
- if (index === 0) {
- Object.assign(this.viji.video, this.videoSystems[index].getVideoAPI());
+ if (vs.getStreamType() === "main") {
+ Object.assign(this.viji.video, vs.getVideoAPI());
  }
  }
  }
  handleVideoStreamsPrepare(message) {
  const { mainStream, mediaStreamCount, directFrameCount } = message.data;
  const totalStreams = (mainStream ? 1 : 0) + mediaStreamCount + directFrameCount;
- this.hasMainVideoStream = mainStream;
  this.debugLog(`[Compositor] Preparing video streams: main=${mainStream}, media=${mediaStreamCount}, direct=${directFrameCount}, total=${totalStreams}`);
  while (this.videoSystems.length < totalStreams) {
  this.videoSystems.push(new VideoSystem());
@@ -4127,7 +4227,16 @@ class VijiWorkerRuntime {
  handleDeviceStateUpdate(message) {
  this.deviceState = message.data;
  this.viji.device = this.deviceState.device;
- this.viji.devices = this.deviceState.devices;
+ const devicesWithVideo = this.deviceState.devices.map((device) => {
+ const videoSystem = this.videoSystems.find(
+ (vs) => vs && vs.getStreamType() === "device" && vs.getDeviceId() === device.id
+ );
+ return {
+ ...device,
+ video: videoSystem ? videoSystem.getVideoAPI() : null
+ };
+ });
+ this.viji.devices = devicesWithVideo;
  }
  }
  class SceneAnalyzer {
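
Each entry in viji.devices now carries a video property — the same API object exposed for streams — or null when no device-typed VideoSystem matches the device ID. A scene-side consumption sketch; the isConnected/currentFrame/frameWidth/frameHeight fields appear earlier in this diff, while drawFrame is a hypothetical helper:

```js
// Hypothetical scene code; drawFrame is a placeholder, not a library API.
for (const device of viji.devices) {
  if (device.video && device.video.isConnected && device.video.currentFrame) {
    drawFrame(device.video.currentFrame, device.video.frameWidth, device.video.frameHeight);
  }
}
```
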
@@ -4194,4 +4303,4 @@ async function setSceneCode(sceneCode) {
  }
  }
  self.setSceneCode = setSceneCode;
- //# sourceMappingURL=viji.worker-Be0jZvYj.js.map
+ //# sourceMappingURL=viji.worker-Cx1WpxdF.js.map