@stream-io/video-client 1.44.6-beta.0 → 1.46.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -804,6 +804,10 @@ export declare class Call {
804
804
  * @param trackType the kind of audio.
805
805
  */
806
806
  bindAudioElement: (audioElement: HTMLAudioElement, sessionId: string, trackType?: AudioTrackType) => (() => void) | undefined;
807
+ /**
808
+ * Plays all audio elements blocked by the browser's autoplay policy.
809
+ */
810
+ resumeAudio: () => Promise<void>;
807
811
  /**
808
812
  * Binds a DOM <img> element to this call's thumbnail (if enabled in settings).
809
813
  *
@@ -3,6 +3,7 @@ import { ConnectedEvent, UserRequest, VideoEvent } from '../../gen/coordinator';
3
3
  import { AllSfuEvents } from '../../rtc';
4
4
  import type { ConfigureLoggersOptions, LogLevel } from '@stream-io/logger';
5
5
  import type { DevicePersistenceOptions } from '../../devices/devicePersistence';
6
+ import { InputDeviceStatus } from '../../devices';
6
7
  export type UR = Record<string, unknown>;
7
8
  export type User = (Omit<UserRequest, 'role'> & {
8
9
  type?: 'authenticated';
@@ -79,7 +80,27 @@ export type MicCaptureReportEvent = {
79
80
  */
80
81
  label?: string;
81
82
  };
82
- export type StreamVideoEvent = (VideoEvent | NetworkChangedEvent | ConnectionChangedEvent | TransportChangedEvent | ConnectionRecoveredEvent | MicCaptureReportEvent) & {
83
+ export type DeviceDisconnectedEvent = {
84
+ type: 'device.disconnected';
85
+ call_cid: string;
86
+ /**
87
+ * The device status at the time it was disconnected.
88
+ */
89
+ status: InputDeviceStatus;
90
+ /**
91
+ * The disconnected device ID.
92
+ */
93
+ deviceId: string;
94
+ /**
95
+ * The human-readable label of the disconnected device.
96
+ */
97
+ label?: string;
98
+ /**
99
+ * The disconnected device kind.
100
+ */
101
+ kind: MediaDeviceKind;
102
+ };
103
+ export type StreamVideoEvent = (VideoEvent | NetworkChangedEvent | ConnectionChangedEvent | TransportChangedEvent | ConnectionRecoveredEvent | MicCaptureReportEvent | DeviceDisconnectedEvent) & {
83
104
  received_at?: string | Date;
84
105
  };
85
106
  export type StreamCallEvent = Extract<StreamVideoEvent, {
@@ -104,6 +104,7 @@ export declare abstract class DeviceManager<S extends DeviceManagerState<C>, C =
104
104
  private get mediaDeviceKind();
105
105
  private handleDisconnectedOrReplacedDevices;
106
106
  protected findDevice(devices: MediaDeviceInfo[], deviceId: string): MediaDeviceInfo | undefined;
107
+ private dispatchDeviceDisconnectedEvent;
107
108
  private persistPreference;
108
109
  protected applyPersistedPreferences(enabledInCallType: boolean): Promise<boolean>;
109
110
  private applyMutedState;
@@ -574,6 +574,10 @@ export interface ParticipantJoined {
574
574
  * @generated from protobuf field: stream.video.sfu.models.Participant participant = 2;
575
575
  */
576
576
  participant?: Participant;
577
+ /**
578
+ * @generated from protobuf field: bool is_pinned = 3;
579
+ */
580
+ isPinned: boolean;
577
581
  }
578
582
  /**
579
583
  * ParticipantLeft is fired when a user leaves a call
@@ -42,6 +42,19 @@ export declare class DynascaleManager {
42
42
  private sfuClient;
43
43
  private pendingSubscriptionsUpdate;
44
44
  readonly audioBindingsWatchdog: AudioBindingsWatchdog | undefined;
45
+ /**
46
+ * Audio elements that were blocked by the browser's autoplay policy.
47
+ * These can be retried by calling `resumeAudio()` from a user gesture.
48
+ */
49
+ private blockedAudioElementsSubject;
50
+ /**
51
+ * Whether the browser's autoplay policy is blocking audio playback.
52
+ * Will be `true` when the browser blocks autoplay (e.g., no prior user interaction).
53
+ * Use `resumeAudio()` within a user gesture to unblock.
54
+ */
55
+ autoplayBlocked$: import("rxjs").Observable<boolean>;
56
+ private addBlockedAudioElement;
57
+ private removeBlockedAudioElement;
45
58
  private videoTrackSubscriptionOverridesSubject;
46
59
  videoTrackSubscriptionOverrides$: import("rxjs").Observable<VideoTrackSubscriptionOverrides>;
47
60
  incomingVideoSettings$: import("rxjs").Observable<{
@@ -121,6 +134,13 @@ export declare class DynascaleManager {
121
134
  * @returns a cleanup function that will unbind the audio element.
122
135
  */
123
136
  bindAudioElement: (audioElement: HTMLAudioElement, sessionId: string, trackType: AudioTrackType) => (() => void) | undefined;
137
+ /**
138
+ * Plays all audio elements blocked by the browser's autoplay policy.
139
+ * Must be called from within a user gesture (e.g., click handler).
140
+ *
141
+ * @returns a promise that resolves when all blocked elements have been retried.
142
+ */
143
+ resumeAudio: () => Promise<void>;
124
144
  private getOrCreateAudioContext;
125
145
  private resumeAudioContext;
126
146
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stream-io/video-client",
3
- "version": "1.44.6-beta.0",
3
+ "version": "1.46.0",
4
4
  "main": "dist/index.cjs.js",
5
5
  "module": "dist/index.es.js",
6
6
  "browser": "dist/index.browser.es.js",
package/src/Call.ts CHANGED
@@ -656,15 +656,15 @@ export class Call {
656
656
  reasonToEndCallReason[
657
657
  rejectReason as keyof typeof reasonToEndCallReason
658
658
  ] ?? 'rejected';
659
- globalThis.streamRNVideoSDK?.callingX?.endCall(this, endCallReason);
660
659
  await this.reject(rejectReason);
660
+ globalThis.streamRNVideoSDK?.callingX?.endCall(this, endCallReason);
661
661
  } else {
662
662
  // if reject was undefined, we still have to cancel the call automatically
663
663
  // when I am the creator and everyone else left the call
664
664
  const hasOtherParticipants = this.state.remoteParticipants.length > 0;
665
665
  if (this.isCreatedByMe && !hasOtherParticipants) {
666
- globalThis.streamRNVideoSDK?.callingX?.endCall(this, 'canceled');
667
666
  await this.reject('cancel');
667
+ globalThis.streamRNVideoSDK?.callingX?.endCall(this, 'canceled');
668
668
  }
669
669
  }
670
670
  }
@@ -2967,6 +2967,13 @@ export class Call {
2967
2967
  };
2968
2968
  };
2969
2969
 
2970
+ /**
2971
+ * Plays all audio elements blocked by the browser's autoplay policy.
2972
+ */
2973
+ resumeAudio = () => {
2974
+ return this.dynascaleManager.resumeAudio();
2975
+ };
2976
+
2970
2977
  /**
2971
2978
  * Binds a DOM <img> element to this call's thumbnail (if enabled in settings).
2972
2979
  *
@@ -3,6 +3,7 @@ import { ConnectedEvent, UserRequest, VideoEvent } from '../../gen/coordinator';
3
3
  import { AllSfuEvents } from '../../rtc';
4
4
  import type { ConfigureLoggersOptions, LogLevel } from '@stream-io/logger';
5
5
  import type { DevicePersistenceOptions } from '../../devices/devicePersistence';
6
+ import { InputDeviceStatus } from '../../devices';
6
7
 
7
8
  export type UR = Record<string, unknown>;
8
9
 
@@ -126,6 +127,27 @@ export type MicCaptureReportEvent = {
126
127
  label?: string;
127
128
  };
128
129
 
130
+ export type DeviceDisconnectedEvent = {
131
+ type: 'device.disconnected';
132
+ call_cid: string;
133
+ /**
134
+ * The device status at the time it was disconnected.
135
+ */
136
+ status: InputDeviceStatus;
137
+ /**
138
+ * The disconnected device ID.
139
+ */
140
+ deviceId: string;
141
+ /**
142
+ * The human-readable label of the disconnected device.
143
+ */
144
+ label?: string;
145
+ /**
146
+ * The disconnected device kind.
147
+ */
148
+ kind: MediaDeviceKind;
149
+ };
150
+
129
151
  export type StreamVideoEvent = (
130
152
  | VideoEvent
131
153
  | NetworkChangedEvent
@@ -133,6 +155,7 @@ export type StreamVideoEvent = (
133
155
  | TransportChangedEvent
134
156
  | ConnectionRecoveredEvent
135
157
  | MicCaptureReportEvent
158
+ | DeviceDisconnectedEvent
136
159
  ) & { received_at?: string | Date };
137
160
 
138
161
  // TODO: we should use WSCallEvent here but that needs fixing
@@ -1,5 +1,6 @@
1
1
  import { combineLatest, firstValueFrom, Observable, pairwise } from 'rxjs';
2
2
  import { Call } from '../Call';
3
+ import type { DeviceDisconnectedEvent } from '../coordinator/connection/types';
3
4
  import { TrackPublishOptions } from '../rtc';
4
5
  import { CallingState } from '../store';
5
6
  import { createSubscription, getCurrentValue } from '../store/rxUtils';
@@ -13,6 +14,7 @@ import { ScopedLogger, videoLoggerSystem } from '../logger';
13
14
  import { TrackType } from '../gen/video/sfu/models/models';
14
15
  import { deviceIds$ } from './devices';
15
16
  import {
17
+ hasPending,
16
18
  settled,
17
19
  withCancellation,
18
20
  withoutConcurrency,
@@ -543,6 +545,7 @@ export abstract class DeviceManager<
543
545
  }
544
546
 
545
547
  if (isDeviceDisconnected) {
548
+ this.dispatchDeviceDisconnectedEvent(prevDevice!);
546
549
  await this.disable();
547
550
  await this.select(undefined);
548
551
  }
@@ -553,7 +556,7 @@ export abstract class DeviceManager<
553
556
  ) {
554
557
  await this.enable();
555
558
  this.isTrackStoppedDueToTrackEnd = false;
556
- } else {
559
+ } else if (!hasPending(this.statusChangeConcurrencyTag)) {
557
560
  await this.applySettingsToStream();
558
561
  }
559
562
  }
@@ -573,6 +576,22 @@ export abstract class DeviceManager<
573
576
  return devices.find((d) => d.deviceId === deviceId && d.kind === kind);
574
577
  }
575
578
 
579
+ private dispatchDeviceDisconnectedEvent(device: MediaDeviceInfo) {
580
+ const event: DeviceDisconnectedEvent = {
581
+ type: 'device.disconnected',
582
+ call_cid: this.call.cid,
583
+ status: this.isTrackStoppedDueToTrackEnd
584
+ ? this.state.prevStatus
585
+ : this.state.status,
586
+ deviceId: device.deviceId,
587
+ label: device.label,
588
+ kind: device.kind,
589
+ };
590
+
591
+ this.call.tracer.trace('device.disconnected', event);
592
+ this.call.streamClient.dispatchEvent(event);
593
+ }
594
+
576
595
  private persistPreference(
577
596
  selectedDevice: string | undefined,
578
597
  status: InputDeviceStatus,
@@ -158,6 +158,7 @@ export class MicrophoneManager extends AudioDeviceManager<MicrophoneManagerState
158
158
  const devices = await firstValueFrom(this.listDevices());
159
159
  const label = devices.find((d) => d.deviceId === deviceId)?.label;
160
160
 
161
+ let lastCapturesAudio: boolean | undefined;
161
162
  this.noAudioDetectorCleanup = createNoAudioDetector(mediaStream, {
162
163
  noAudioThresholdMs: this.silenceThresholdMs,
163
164
  emitIntervalMs: this.silenceThresholdMs,
@@ -169,7 +170,12 @@ export class MicrophoneManager extends AudioDeviceManager<MicrophoneManagerState
169
170
  deviceId,
170
171
  label,
171
172
  };
172
- this.call.tracer.trace('mic.capture_report', event);
173
+
174
+ if (capturesAudio !== lastCapturesAudio) {
175
+ lastCapturesAudio = capturesAudio;
176
+ this.call.tracer.trace('mic.capture_report', event);
177
+ }
178
+
173
179
  this.call.streamClient.dispatchEvent(event);
174
180
  },
175
181
  });
@@ -380,6 +380,14 @@ describe('Device Manager', () => {
380
380
 
381
381
  expect(manager.state.selectedDevice).toBe(undefined);
382
382
  expect(manager.state.status).toBe('disabled');
383
+ expect(manager['call'].streamClient.dispatchEvent).toHaveBeenCalledWith({
384
+ type: 'device.disconnected',
385
+ call_cid: manager['call'].cid,
386
+ status: 'enabled',
387
+ deviceId: device.deviceId,
388
+ label: device.label,
389
+ kind: device.kind,
390
+ });
383
391
 
384
392
  vi.useRealTimers();
385
393
  });
@@ -95,9 +95,13 @@ export const mockCall = (): Partial<Call> => {
95
95
  }),
96
96
  );
97
97
  return {
98
+ cid: 'default:test-call',
98
99
  state: callState,
99
100
  publish: vi.fn(),
100
101
  stopPublish: vi.fn(),
102
+ streamClient: fromPartial({
103
+ dispatchEvent: vi.fn(),
104
+ }),
101
105
  notifyNoiseCancellationStarting: vi.fn().mockResolvedValue(undefined),
102
106
  notifyNoiseCancellationStopped: vi.fn().mockResolvedValue(undefined),
103
107
  tracer: new Tracer('tests'),
@@ -74,6 +74,47 @@ describe('Participant events', () => {
74
74
 
75
75
  expect(state.participants).toEqual([]);
76
76
  });
77
+
78
+ it('sets a server-side pin when isPinned is true', () => {
79
+ const state = new CallState();
80
+ state.setSortParticipantsBy(noopComparator());
81
+
82
+ const onParticipantJoined = watchParticipantJoined(state);
83
+ const now = Date.now();
84
+
85
+ onParticipantJoined({
86
+ // @ts-expect-error incomplete data
87
+ participant: {
88
+ userId: 'user-id',
89
+ sessionId: 'session-id',
90
+ },
91
+ isPinned: true,
92
+ });
93
+
94
+ const participant = state.findParticipantBySessionId('session-id');
95
+ expect(participant?.pin).toBeDefined();
96
+ expect(participant?.pin?.isLocalPin).toBe(false);
97
+ expect(participant?.pin?.pinnedAt).toBeGreaterThanOrEqual(now);
98
+ });
99
+
100
+ it('does not set a pin when isPinned is false', () => {
101
+ const state = new CallState();
102
+ state.setSortParticipantsBy(noopComparator());
103
+
104
+ const onParticipantJoined = watchParticipantJoined(state);
105
+
106
+ onParticipantJoined({
107
+ // @ts-expect-error incomplete data
108
+ participant: {
109
+ userId: 'user-id',
110
+ sessionId: 'session-id',
111
+ },
112
+ isPinned: false,
113
+ });
114
+
115
+ const participant = state.findParticipantBySessionId('session-id');
116
+ expect(participant?.pin).toBeUndefined();
117
+ });
77
118
  });
78
119
 
79
120
  describe('orphaned tracks reconciliation', () => {
@@ -38,6 +38,7 @@ export const watchParticipantJoined = (state: CallState) => {
38
38
  StreamVideoParticipantPatch | undefined,
39
39
  Partial<StreamVideoParticipant>
40
40
  >(participant, orphanedTracks, {
41
+ ...(e.isPinned && { pin: { isLocalPin: false, pinnedAt: Date.now() } }),
41
42
  viewportVisibilityState: {
42
43
  videoTrack: VisibilityState.UNKNOWN,
43
44
  screenShareTrack: VisibilityState.UNKNOWN,
@@ -625,6 +625,10 @@ export interface ParticipantJoined {
625
625
  * @generated from protobuf field: stream.video.sfu.models.Participant participant = 2;
626
626
  */
627
627
  participant?: Participant;
628
+ /**
629
+ * @generated from protobuf field: bool is_pinned = 3;
630
+ */
631
+ isPinned: boolean;
628
632
  }
629
633
  /**
630
634
  * ParticipantLeft is fired when a user leaves a call
@@ -1557,6 +1561,7 @@ class ParticipantJoined$Type extends MessageType<ParticipantJoined> {
1557
1561
  super('stream.video.sfu.event.ParticipantJoined', [
1558
1562
  { no: 1, name: 'call_cid', kind: 'scalar', T: 9 /*ScalarType.STRING*/ },
1559
1563
  { no: 2, name: 'participant', kind: 'message', T: () => Participant },
1564
+ { no: 3, name: 'is_pinned', kind: 'scalar', T: 8 /*ScalarType.BOOL*/ },
1560
1565
  ]);
1561
1566
  }
1562
1567
  }
@@ -3,6 +3,7 @@ import { CallingState, CallState } from '../store';
3
3
  import { createSubscription } from '../store/rxUtils';
4
4
  import { videoLoggerSystem } from '../logger';
5
5
  import { Tracer } from '../stats';
6
+ import { TrackType } from '../gen/video/sfu/models/models';
6
7
 
7
8
  const toBindingKey = (
8
9
  sessionId: string,
@@ -91,12 +92,23 @@ export class AudioBindingsWatchdog {
91
92
  const danglingUserIds: string[] = [];
92
93
  for (const p of this.state.participants) {
93
94
  if (p.isLocalParticipant) continue;
94
- const { audioStream, screenShareAudioStream, sessionId, userId } = p;
95
- if (audioStream && !this.bindings.has(toBindingKey(sessionId))) {
95
+ const {
96
+ audioStream,
97
+ screenShareAudioStream,
98
+ sessionId,
99
+ userId,
100
+ publishedTracks,
101
+ } = p;
102
+ if (
103
+ audioStream &&
104
+ publishedTracks.includes(TrackType.AUDIO) &&
105
+ !this.bindings.has(toBindingKey(sessionId))
106
+ ) {
96
107
  danglingUserIds.push(userId);
97
108
  }
98
109
  if (
99
110
  screenShareAudioStream &&
111
+ publishedTracks.includes(TrackType.SCREEN_SHARE_AUDIO) &&
100
112
  !this.bindings.has(toBindingKey(sessionId, 'screenShareAudioTrack'))
101
113
  ) {
102
114
  danglingUserIds.push(userId);
@@ -79,6 +79,40 @@ export class DynascaleManager {
79
79
  private pendingSubscriptionsUpdate: NodeJS.Timeout | null = null;
80
80
  readonly audioBindingsWatchdog: AudioBindingsWatchdog | undefined;
81
81
 
82
+ /**
83
+ * Audio elements that were blocked by the browser's autoplay policy.
84
+ * These can be retried by calling `resumeAudio()` from a user gesture.
85
+ */
86
+ private blockedAudioElementsSubject = new BehaviorSubject<
87
+ Set<HTMLAudioElement>
88
+ >(new Set());
89
+
90
+ /**
91
+ * Whether the browser's autoplay policy is blocking audio playback.
92
+ * Will be `true` when the browser blocks autoplay (e.g., no prior user interaction).
93
+ * Use `resumeAudio()` within a user gesture to unblock.
94
+ */
95
+ autoplayBlocked$ = this.blockedAudioElementsSubject.pipe(
96
+ map((elements) => elements.size > 0),
97
+ distinctUntilChanged(),
98
+ );
99
+
100
+ private addBlockedAudioElement = (audioElement: HTMLAudioElement) => {
101
+ setCurrentValue(this.blockedAudioElementsSubject, (elements) => {
102
+ const next = new Set(elements);
103
+ next.add(audioElement);
104
+ return next;
105
+ });
106
+ };
107
+
108
+ private removeBlockedAudioElement = (audioElement: HTMLAudioElement) => {
109
+ setCurrentValue(this.blockedAudioElementsSubject, (elements) => {
110
+ const nextElements = new Set(elements);
111
+ nextElements.delete(audioElement);
112
+ return nextElements;
113
+ });
114
+ };
115
+
82
116
  private videoTrackSubscriptionOverridesSubject =
83
117
  new BehaviorSubject<VideoTrackSubscriptionOverrides>({});
84
118
 
@@ -136,6 +170,7 @@ export class DynascaleManager {
136
170
  clearTimeout(this.pendingSubscriptionsUpdate);
137
171
  }
138
172
  this.audioBindingsWatchdog?.dispose();
173
+ setCurrentValue(this.blockedAudioElementsSubject, new Set());
139
174
  const context = this.audioContext;
140
175
  if (context && context.state !== 'closed') {
141
176
  document.removeEventListener('click', this.resumeAudioContext);
@@ -575,7 +610,10 @@ export class DynascaleManager {
575
610
 
576
611
  setTimeout(() => {
577
612
  audioElement.srcObject = source ?? null;
578
- if (!source) return;
613
+ if (!source) {
614
+ this.removeBlockedAudioElement(audioElement);
615
+ return;
616
+ }
579
617
 
580
618
  // Safari has a special quirk that prevents playing audio until the user
581
619
  // interacts with the page or focuses on the tab where the call happens.
@@ -599,6 +637,10 @@ export class DynascaleManager {
599
637
  audioElement.muted = false;
600
638
  audioElement.play().catch((e) => {
601
639
  this.tracer.trace('audioPlaybackError', e.message);
640
+ if (e.name === 'NotAllowedError') {
641
+ this.tracer.trace('audioPlaybackBlocked', null);
642
+ this.addBlockedAudioElement(audioElement);
643
+ }
602
644
  this.logger.warn(`Failed to play audio stream`, e);
603
645
  });
604
646
  }
@@ -628,6 +670,7 @@ export class DynascaleManager {
628
670
 
629
671
  return () => {
630
672
  this.audioBindingsWatchdog?.unregister(sessionId, trackType);
673
+ this.removeBlockedAudioElement(audioElement);
631
674
  sinkIdSubscription?.unsubscribe();
632
675
  volumeSubscription.unsubscribe();
633
676
  updateMediaStreamSubscription.unsubscribe();
@@ -637,6 +680,34 @@ export class DynascaleManager {
637
680
  };
638
681
  };
639
682
 
683
+ /**
684
+ * Plays all audio elements blocked by the browser's autoplay policy.
685
+ * Must be called from within a user gesture (e.g., click handler).
686
+ *
687
+ * @returns a promise that resolves when all blocked elements have been retried.
688
+ */
689
+ resumeAudio = async () => {
690
+ this.tracer.trace('resumeAudio', null);
691
+ const blocked = new Set<HTMLAudioElement>();
692
+ await Promise.all(
693
+ Array.from(
694
+ getCurrentValue(this.blockedAudioElementsSubject),
695
+ async (el) => {
696
+ try {
697
+ if (el.srcObject) {
698
+ await el.play();
699
+ }
700
+ } catch {
701
+ this.logger.warn(`Can't resume audio for element: `, el);
702
+ blocked.add(el);
703
+ }
704
+ },
705
+ ),
706
+ );
707
+
708
+ setCurrentValue(this.blockedAudioElementsSubject, blocked);
709
+ };
710
+
640
711
  private getOrCreateAudioContext = (): AudioContext | undefined => {
641
712
  if (!this.useWebAudio) return;
642
713
  if (this.audioContext) return this.audioContext;
@@ -23,11 +23,15 @@ export class RNSpeechDetector {
23
23
  : await navigator.mediaDevices.getUserMedia({ audio: true });
24
24
  this.audioStream = audioStream;
25
25
 
26
- this.pc1.addEventListener('icecandidate', async (e) => {
27
- await this.pc2.addIceCandidate(e.candidate);
26
+ this.pc1.addEventListener('icecandidate', (e) => {
27
+ this.pc2.addIceCandidate(e.candidate).catch(() => {
28
+ // do nothing
29
+ });
28
30
  });
29
31
  this.pc2.addEventListener('icecandidate', async (e) => {
30
- await this.pc1.addIceCandidate(e.candidate);
32
+ this.pc1.addIceCandidate(e.candidate).catch(() => {
33
+ // do nothing
34
+ });
31
35
  });
32
36
  this.pc2.addEventListener('track', (e) => {
33
37
  e.streams[0].getTracks().forEach((track) => {
@@ -11,6 +11,7 @@ import { StreamClient } from '../../coordinator/connection/client';
11
11
  import { CallingState, StreamVideoWriteableStateStore } from '../../store';
12
12
  import { noopComparator } from '../../sorting';
13
13
  import { fromPartial } from '@total-typescript/shoehorn';
14
+ import { TrackType } from '../../gen/video/sfu/models/models';
14
15
 
15
16
  describe('AudioBindingsWatchdog', () => {
16
17
  let watchdog: AudioBindingsWatchdog;
@@ -44,12 +45,17 @@ describe('AudioBindingsWatchdog', () => {
44
45
  screenShareAudioStream?: MediaStream;
45
46
  },
46
47
  ) => {
48
+ const publishedTracks = [];
49
+ if (streams?.audioStream) publishedTracks.push(TrackType.AUDIO);
50
+ if (streams?.screenShareAudioStream) {
51
+ publishedTracks.push(TrackType.SCREEN_SHARE_AUDIO);
52
+ }
47
53
  call.state.updateOrAddParticipant(
48
54
  sessionId,
49
55
  fromPartial({
50
56
  userId,
51
57
  sessionId,
52
- publishedTracks: [],
58
+ publishedTracks,
53
59
  ...streams,
54
60
  }),
55
61
  );
@@ -233,6 +239,26 @@ describe('AudioBindingsWatchdog', () => {
233
239
  expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining('user-1'));
234
240
  });
235
241
 
242
+ it('should not warn when audioStream exists but audio is not published', () => {
243
+ // @ts-expect-error private property
244
+ const warnSpy = vi.spyOn(watchdog.logger, 'warn');
245
+
246
+ call.state.updateOrAddParticipant(
247
+ 'session-1',
248
+ fromPartial({
249
+ userId: 'user-1',
250
+ sessionId: 'session-1',
251
+ publishedTracks: [],
252
+ audioStream: new MediaStream(),
253
+ }),
254
+ );
255
+
256
+ call.state.setCallingState(CallingState.JOINED);
257
+ vi.advanceTimersByTime(3000);
258
+
259
+ expect(warnSpy).not.toHaveBeenCalled();
260
+ });
261
+
236
262
  it('should not warn when screenShareAudio element is bound', () => {
237
263
  // @ts-expect-error private property
238
264
  const warnSpy = vi.spyOn(watchdog.logger, 'warn');