@stream-io/video-client 0.3.13 → 0.3.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -28,6 +28,5 @@ export declare class CameraManager extends InputMediaDeviceManager<CameraManager
28
28
  protected getStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
29
29
  protected publishStream(stream: MediaStream): Promise<void>;
30
30
  protected stopPublishStream(stopTracks: boolean): Promise<void>;
31
- protected muteTracks(): void;
32
- protected unmuteTracks(): void;
31
+ protected getTrack(): MediaStreamTrack | undefined;
33
32
  }
@@ -1,9 +1,12 @@
1
1
  import { Observable } from 'rxjs';
2
2
  import { Call } from '../Call';
3
3
  import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState';
4
+ import { Logger } from '../coordinator/connection/types';
5
+ import { TrackType } from '../gen/video/sfu/models/models';
4
6
  export declare abstract class InputMediaDeviceManager<T extends InputMediaDeviceManagerState> {
5
7
  protected readonly call: Call;
6
8
  readonly state: T;
9
+ protected readonly trackType: TrackType;
7
10
  /**
8
11
  * @internal
9
12
  */
@@ -12,7 +15,8 @@ export declare abstract class InputMediaDeviceManager<T extends InputMediaDevice
12
15
  * @internal
13
16
  */
14
17
  disablePromise?: Promise<void>;
15
- constructor(call: Call, state: T);
18
+ logger: Logger;
19
+ constructor(call: Call, state: T, trackType: TrackType);
16
20
  /**
17
21
  * Lists the available audio/video devices
18
22
  *
@@ -54,8 +58,11 @@ export declare abstract class InputMediaDeviceManager<T extends InputMediaDevice
54
58
  protected abstract getStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
55
59
  protected abstract publishStream(stream: MediaStream): Promise<void>;
56
60
  protected abstract stopPublishStream(stopTracks: boolean): Promise<void>;
57
- protected abstract muteTracks(): void;
58
- protected abstract unmuteTracks(): void;
61
+ protected abstract getTrack(): undefined | MediaStreamTrack;
59
62
  private muteStream;
63
+ private muteTrack;
64
+ private unmuteTrack;
65
+ private stopTrack;
66
+ private muteLocalStream;
60
67
  private unmuteStream;
61
68
  }
@@ -8,6 +8,5 @@ export declare class MicrophoneManager extends InputMediaDeviceManager<Microphon
8
8
  protected getStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
9
9
  protected publishStream(stream: MediaStream): Promise<void>;
10
10
  protected stopPublishStream(stopTracks: boolean): Promise<void>;
11
- protected muteTracks(): void;
12
- protected unmuteTracks(): void;
11
+ protected getTrack(): MediaStreamTrack | undefined;
13
12
  }
@@ -31,6 +31,14 @@ export declare class Publisher {
31
31
  private transceiverInitOrder;
32
32
  private readonly trackKindMapping;
33
33
  private readonly trackLayersCache;
34
+ /**
35
+ * A map keeping track of track types that were published to the SFU.
36
+ * This map shouldn't be cleared when unpublishing a track, as it is used
37
+ * to determine whether a track was published before.
38
+ *
39
+ * @private
40
+ */
41
+ private readonly trackTypePublishHistory;
34
42
  private readonly isDtxEnabled;
35
43
  private readonly isRedEnabled;
36
44
  private readonly preferredVideoCodec?;
@@ -85,6 +93,20 @@ export declare class Publisher {
85
93
  * @param trackType the track type to check.
86
94
  */
87
95
  isPublishing: (trackType: TrackType) => boolean;
96
+ /**
97
+ * Returns true if the given track type was ever published to the SFU.
98
+ * Contrary to `isPublishing`, this method returns true if a certain
99
+ * track type was published before, even if it is currently unpublished.
100
+ *
101
+ * @param trackType the track type to check.
102
+ */
103
+ hasEverPublished: (trackType: TrackType) => boolean;
104
+ /**
105
+ * Returns true if the given track type is currently live
106
+ *
107
+ * @param trackType the track type to check.
108
+ */
109
+ isLive: (trackType: TrackType) => boolean;
88
110
  private notifyTrackMuteStateChanged;
89
111
  /**
90
112
  * Stops publishing all tracks and stop all tracks.
package/dist/version.d.ts CHANGED
@@ -1 +1 @@
1
- export declare const version = "0.3.13";
1
+ export declare const version = "0.3.15";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stream-io/video-client",
3
- "version": "0.3.13",
3
+ "version": "0.3.15",
4
4
  "packageManager": "yarn@3.2.4",
5
5
  "main": "dist/index.cjs.js",
6
6
  "module": "dist/index.es.js",
package/src/Call.ts CHANGED
@@ -277,6 +277,41 @@ export class Call {
277
277
 
278
278
  this.camera = new CameraManager(this);
279
279
  this.microphone = new MicrophoneManager(this);
280
+
281
+ // FIXME OL: disable soft-mutes as they are not working properly
282
+ // this.state.localParticipant$.subscribe(async (p) => {
283
+ // if (!this.publisher) return;
284
+ // // Mute via device manager
285
+ // // If integrator doesn't use device manager, we mute using stopPublish
286
+ // if (
287
+ // this.publisher.hasEverPublished(TrackType.VIDEO) &&
288
+ // this.publisher.isPublishing(TrackType.VIDEO) &&
289
+ // !p?.publishedTracks.includes(TrackType.VIDEO)
290
+ // ) {
291
+ // this.logger(
292
+ // 'info',
293
+ // `Local participant's video track is muted remotely`,
294
+ // );
295
+ // await this.camera.disable();
296
+ // if (this.publisher.isPublishing(TrackType.VIDEO)) {
297
+ // await this.stopPublish(TrackType.VIDEO);
298
+ // }
299
+ // }
300
+ // if (
301
+ // this.publisher.hasEverPublished(TrackType.AUDIO) &&
302
+ // this.publisher.isPublishing(TrackType.AUDIO) &&
303
+ // !p?.publishedTracks.includes(TrackType.AUDIO)
304
+ // ) {
305
+ // this.logger(
306
+ // 'info',
307
+ // `Local participant's audio track is muted remotely`,
308
+ // );
309
+ // await this.microphone.disable();
310
+ // if (this.publisher.isPublishing(TrackType.AUDIO)) {
311
+ // await this.stopPublish(TrackType.AUDIO);
312
+ // }
313
+ // }
314
+ // });
280
315
  this.speaker = new SpeakerManager();
281
316
  }
282
317
 
@@ -309,10 +344,50 @@ export class Call {
309
344
  const hasPermission = this.permissionsContext.hasPermission(
310
345
  permission as OwnCapability,
311
346
  );
312
- if (!hasPermission && this.publisher.isPublishing(trackType)) {
313
- this.stopPublish(trackType).catch((err) => {
314
- this.logger('error', `Error stopping publish ${trackType}`, err);
315
- });
347
+ if (
348
+ !hasPermission &&
349
+ (this.publisher.isPublishing(trackType) ||
350
+ this.publisher.isLive(trackType))
351
+ ) {
352
+ // Stop tracks, then notify device manager
353
+ this.stopPublish(trackType)
354
+ .catch((err) => {
355
+ this.logger(
356
+ 'error',
357
+ `Error stopping publish ${trackType}`,
358
+ err,
359
+ );
360
+ })
361
+ .then(() => {
362
+ if (
363
+ trackType === TrackType.VIDEO &&
364
+ this.camera.state.status === 'enabled'
365
+ ) {
366
+ this.camera
367
+ .disable()
368
+ .catch((err) =>
369
+ this.logger(
370
+ 'error',
371
+ `Error disabling camera after permission revoked`,
372
+ err,
373
+ ),
374
+ );
375
+ }
376
+ if (
377
+ trackType === TrackType.AUDIO &&
378
+ this.microphone.state.status === 'enabled'
379
+ ) {
380
+ this.microphone
381
+ .disable()
382
+ .catch((err) =>
383
+ this.logger(
384
+ 'error',
385
+ `Error disabling microphone after permission revoked`,
386
+ err,
387
+ ),
388
+ );
389
+ }
390
+ });
316
391
  }
317
392
  }
318
393
  }),
@@ -1112,7 +1187,10 @@ export class Call {
1112
1187
  * @param stopTrack if `true` the track will be stopped, else it will be just disabled
1113
1188
  */
1114
1189
  stopPublish = async (trackType: TrackType, stopTrack: boolean = true) => {
1115
- this.logger('info', `stopPublish ${TrackType[trackType]}`);
1190
+ this.logger(
1191
+ 'info',
1192
+ `stopPublish ${TrackType[trackType]}, stop tracks: ${stopTrack}`,
1193
+ );
1116
1194
  await this.publisher?.unpublishStream(trackType, stopTrack);
1117
1195
  };
1118
1196
 
@@ -396,5 +396,9 @@ const retryable = async <I extends object, O extends SfuResponseWithError>(
396
396
  retryAttempt < MAX_RETRIES
397
397
  );
398
398
 
399
+ if (rpcCallResult.response.error) {
400
+ throw rpcCallResult.response.error;
401
+ }
402
+
399
403
  return rpcCallResult;
400
404
  };
@@ -5,7 +5,7 @@ const HINT_URL = `https://hint.stream-io-video.com/`;
5
5
 
6
6
  export const getLocationHint = async (
7
7
  hintUrl: string = HINT_URL,
8
- timeout: number = 1500,
8
+ timeout: number = 2000,
9
9
  ) => {
10
10
  const abortController = new AbortController();
11
11
  const timeoutId = setTimeout(() => abortController.abort(), timeout);
@@ -18,7 +18,7 @@ export const getLocationHint = async (
18
18
  logger('debug', `Location header: ${awsPop}`);
19
19
  return awsPop.substring(0, 3); // AMS1-P2 -> AMS
20
20
  } catch (e) {
21
- logger('error', `Failed to get location hint from ${HINT_URL}`, e);
21
+ logger('warn', `Failed to get location hint from ${HINT_URL}`, e);
22
22
  return 'ERR';
23
23
  } finally {
24
24
  clearTimeout(timeoutId);
@@ -12,7 +12,7 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
12
12
  };
13
13
 
14
14
  constructor(call: Call) {
15
- super(call, new CameraManagerState());
15
+ super(call, new CameraManagerState(), TrackType.VIDEO);
16
16
  }
17
17
 
18
18
  /**
@@ -59,6 +59,10 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
59
59
  height !== this.targetResolution.height
60
60
  )
61
61
  await this.applySettingsToStream();
62
+ this.logger(
63
+ 'debug',
64
+ `${width}x${height} target resolution applied to media stream`,
65
+ );
62
66
  }
63
67
  }
64
68
 
@@ -85,12 +89,7 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
85
89
  return this.call.stopPublish(TrackType.VIDEO, stopTracks);
86
90
  }
87
91
 
88
- protected muteTracks(): void {
89
- this.state.mediaStream
90
- ?.getVideoTracks()
91
- .forEach((t) => (t.enabled = false));
92
- }
93
- protected unmuteTracks(): void {
94
- this.state.mediaStream?.getVideoTracks().forEach((t) => (t.enabled = true));
92
+ protected getTrack() {
93
+ return this.state.mediaStream?.getVideoTracks()[0];
95
94
  }
96
95
  }
@@ -2,8 +2,10 @@ import { Observable } from 'rxjs';
2
2
  import { Call } from '../Call';
3
3
  import { CallingState } from '../store';
4
4
  import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState';
5
- import { disposeOfMediaStream } from './devices';
6
5
  import { isReactNative } from '../helpers/platforms';
6
+ import { Logger } from '../coordinator/connection/types';
7
+ import { getLogger } from '../logger';
8
+ import { TrackType } from '../gen/video/sfu/models/models';
7
9
 
8
10
  export abstract class InputMediaDeviceManager<
9
11
  T extends InputMediaDeviceManagerState,
@@ -16,7 +18,15 @@ export abstract class InputMediaDeviceManager<
16
18
  * @internal
17
19
  */
18
20
  disablePromise?: Promise<void>;
19
- constructor(protected readonly call: Call, public readonly state: T) {}
21
+ logger: Logger;
22
+
23
+ constructor(
24
+ protected readonly call: Call,
25
+ public readonly state: T,
26
+ protected readonly trackType: TrackType,
27
+ ) {
28
+ this.logger = getLogger([`${TrackType[trackType].toLowerCase()} manager`]);
29
+ }
20
30
 
21
31
  /**
22
32
  * Lists the available audio/video devices
@@ -129,32 +139,68 @@ export abstract class InputMediaDeviceManager<
129
139
 
130
140
  protected abstract stopPublishStream(stopTracks: boolean): Promise<void>;
131
141
 
132
- protected abstract muteTracks(): void;
133
-
134
- protected abstract unmuteTracks(): void;
142
+ protected abstract getTrack(): undefined | MediaStreamTrack;
135
143
 
136
144
  private async muteStream(stopTracks: boolean = true) {
137
145
  if (!this.state.mediaStream) {
138
146
  return;
139
147
  }
148
+ this.logger('debug', `${stopTracks ? 'Stopping' : 'Disabling'} stream`);
140
149
  if (this.call.state.callingState === CallingState.JOINED) {
141
150
  await this.stopPublishStream(stopTracks);
142
- } else if (this.state.mediaStream) {
143
- stopTracks
144
- ? disposeOfMediaStream(this.state.mediaStream)
145
- : this.muteTracks();
146
151
  }
147
- if (stopTracks) {
152
+ this.muteLocalStream(stopTracks);
153
+ if (this.getTrack()?.readyState === 'ended') {
154
+ // @ts-expect-error release() is present in react-native-webrtc and must be called to dispose the stream
155
+ if (typeof this.state.mediaStream.release === 'function') {
156
+ // @ts-expect-error
157
+ this.state.mediaStream.release();
158
+ }
148
159
  this.state.setMediaStream(undefined);
149
160
  }
150
161
  }
151
162
 
163
+ private muteTrack() {
164
+ const track = this.getTrack();
165
+ if (!track || !track.enabled) {
166
+ return;
167
+ }
168
+ track.enabled = false;
169
+ }
170
+
171
+ private unmuteTrack() {
172
+ const track = this.getTrack();
173
+ if (!track || track.enabled) {
174
+ return;
175
+ }
176
+ track.enabled = true;
177
+ }
178
+
179
+ private stopTrack() {
180
+ const track = this.getTrack();
181
+ if (!track || track.readyState === 'ended') {
182
+ return;
183
+ }
184
+ track.stop();
185
+ }
186
+
187
+ private muteLocalStream(stopTracks: boolean) {
188
+ if (!this.state.mediaStream) {
189
+ return;
190
+ }
191
+ stopTracks ? this.stopTrack() : this.muteTrack();
192
+ }
193
+
152
194
  private async unmuteStream() {
195
+ this.logger('debug', 'Starting stream');
153
196
  let stream: MediaStream;
154
- if (this.state.mediaStream) {
197
+ if (this.state.mediaStream && this.getTrack()?.readyState === 'live') {
155
198
  stream = this.state.mediaStream;
156
- this.unmuteTracks();
199
+ this.unmuteTrack();
157
200
  } else {
201
+ if (this.state.mediaStream) {
202
+ this.stopTrack();
203
+ }
158
204
  const constraints = { deviceId: this.state.selectedDevice };
159
205
  stream = await this.getStream(constraints);
160
206
  }
@@ -7,7 +7,7 @@ import { TrackType } from '../gen/video/sfu/models/models';
7
7
 
8
8
  export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManagerState> {
9
9
  constructor(call: Call) {
10
- super(call, new MicrophoneManagerState());
10
+ super(call, new MicrophoneManagerState(), TrackType.AUDIO);
11
11
  }
12
12
 
13
13
  protected getDevices(): Observable<MediaDeviceInfo[]> {
@@ -25,12 +25,7 @@ export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManager
25
25
  return this.call.stopPublish(TrackType.AUDIO, stopTracks);
26
26
  }
27
27
 
28
- protected muteTracks(): void {
29
- this.state.mediaStream
30
- ?.getAudioTracks()
31
- .forEach((t) => (t.enabled = false));
32
- }
33
- protected unmuteTracks(): void {
34
- this.state.mediaStream?.getAudioTracks().forEach((t) => (t.enabled = true));
28
+ protected getTrack() {
29
+ return this.state.mediaStream?.getAudioTracks()[0];
35
30
  }
36
31
  }
@@ -7,14 +7,7 @@ import { mockCall, mockVideoDevices, mockVideoStream } from './mocks';
7
7
  import { InputMediaDeviceManager } from '../InputMediaDeviceManager';
8
8
  import { InputMediaDeviceManagerState } from '../InputMediaDeviceManagerState';
9
9
  import { of } from 'rxjs';
10
- import { disposeOfMediaStream } from '../devices';
11
-
12
- vi.mock('../devices.ts', () => {
13
- console.log('MOCKING devices');
14
- return {
15
- disposeOfMediaStream: vi.fn(),
16
- };
17
- });
10
+ import { TrackType } from '../../gen/video/sfu/models/models';
18
11
 
19
12
  vi.mock('../../Call.ts', () => {
20
13
  console.log('MOCKING Call');
@@ -32,11 +25,10 @@ class TestInputMediaDeviceManager extends InputMediaDeviceManager<TestInputMedia
32
25
  public getStream = vi.fn(() => Promise.resolve(mockVideoStream()));
33
26
  public publishStream = vi.fn();
34
27
  public stopPublishStream = vi.fn();
35
- public muteTracks = vi.fn();
36
- public unmuteTracks = vi.fn();
28
+ public getTrack = () => this.state.mediaStream!.getVideoTracks()[0];
37
29
 
38
30
  constructor(call: Call) {
39
- super(call, new TestInputMediaDeviceManagerState());
31
+ super(call, new TestInputMediaDeviceManagerState(), TrackType.VIDEO);
40
32
  }
41
33
  }
42
34
 
@@ -135,11 +127,12 @@ describe('InputMediaDeviceManager.test', () => {
135
127
  it('select device when status is enabled', async () => {
136
128
  await manager.enable();
137
129
  const prevStream = manager.state.mediaStream;
130
+ vi.spyOn(prevStream!.getVideoTracks()[0], 'stop');
138
131
 
139
132
  const deviceId = mockVideoDevices[1].deviceId;
140
133
  await manager.select(deviceId);
141
134
 
142
- expect(disposeOfMediaStream).toHaveBeenCalledWith(prevStream);
135
+ expect(prevStream!.getVideoTracks()[0].stop).toHaveBeenCalledWith();
143
136
  });
144
137
 
145
138
  it('select device when status is enabled and in call', async () => {
@@ -93,6 +93,10 @@ export const mockVideoStream = () => {
93
93
  height: 720,
94
94
  }),
95
95
  enabled: true,
96
+ readyState: 'live',
97
+ stop: () => {
98
+ track.readyState = 'ended';
99
+ },
96
100
  };
97
101
  return {
98
102
  getVideoTracks: () => [track],
@@ -53,6 +53,7 @@ describe('DynascaleManager', () => {
53
53
  call.state.updateOrAddParticipant('session-id', {
54
54
  userId: 'user-id',
55
55
  sessionId: 'session-id',
56
+ publishedTracks: [],
56
57
  });
57
58
 
58
59
  const element = document.createElement('div');
@@ -113,6 +114,7 @@ describe('DynascaleManager', () => {
113
114
  call.state.updateOrAddParticipant('session-id', {
114
115
  userId: 'user-id',
115
116
  sessionId: 'session-id',
117
+ publishedTracks: [],
116
118
  });
117
119
 
118
120
  // @ts-ignore
@@ -120,6 +122,7 @@ describe('DynascaleManager', () => {
120
122
  userId: 'user-id-local',
121
123
  sessionId: 'session-id-local',
122
124
  isLocalParticipant: true,
125
+ publishedTracks: [],
123
126
  });
124
127
 
125
128
  const cleanup = dynascaleManager.bindAudioElement(
@@ -86,6 +86,15 @@ export class Publisher {
86
86
  [TrackType.UNSPECIFIED]: undefined,
87
87
  };
88
88
 
89
+ /**
90
+ * A map keeping track of track types that were published to the SFU.
91
+ * This map shouldn't be cleared when unpublishing a track, as it is used
92
+ * to determine whether a track was published before.
93
+ *
94
+ * @private
95
+ */
96
+ private readonly trackTypePublishHistory = new Map<TrackType, boolean>();
97
+
89
98
  private readonly isDtxEnabled: boolean;
90
99
  private readonly isRedEnabled: boolean;
91
100
  private readonly preferredVideoCodec?: string;
@@ -253,6 +262,9 @@ export class Publisher {
253
262
  // by an external factor as permission revokes, device disconnected, etc.
254
263
  // keep in mind that `track.stop()` doesn't trigger this event.
255
264
  track.addEventListener('ended', handleTrackEnded);
265
+ if (!track.enabled) {
266
+ track.enabled = true;
267
+ }
256
268
 
257
269
  transceiver = this.pc.addTransceiver(track, {
258
270
  direction: 'sendonly',
@@ -266,6 +278,7 @@ export class Publisher {
266
278
  logger('debug', `Added ${TrackType[trackType]} transceiver`);
267
279
  this.transceiverInitOrder.push(trackType);
268
280
  this.transceiverRegistry[trackType] = transceiver;
281
+ this.trackTypePublishHistory.set(trackType, true);
269
282
 
270
283
  if ('setCodecPreferences' in transceiver && codecPreferences) {
271
284
  logger(
@@ -310,17 +323,24 @@ export class Publisher {
310
323
  if (
311
324
  transceiver &&
312
325
  transceiver.sender.track &&
313
- transceiver.sender.track.readyState === 'live'
326
+ (stopTrack
327
+ ? transceiver.sender.track.readyState === 'live'
328
+ : transceiver.sender.track.enabled)
314
329
  ) {
315
330
  stopTrack
316
331
  ? transceiver.sender.track.stop()
317
332
  : (transceiver.sender.track.enabled = false);
318
- return this.notifyTrackMuteStateChanged(
319
- undefined,
320
- transceiver.sender.track,
321
- trackType,
322
- true,
323
- );
333
+ // We don't need to notify SFU if unpublishing in response to remote soft mute
334
+ if (!this.state.localParticipant?.publishedTracks.includes(trackType)) {
335
+ return;
336
+ } else {
337
+ return this.notifyTrackMuteStateChanged(
338
+ undefined,
339
+ transceiver.sender.track,
340
+ trackType,
341
+ true,
342
+ );
343
+ }
324
344
  }
325
345
  };
326
346
 
@@ -330,6 +350,35 @@ export class Publisher {
330
350
  * @param trackType the track type to check.
331
351
  */
332
352
  isPublishing = (trackType: TrackType): boolean => {
353
+ const transceiverForTrackType = this.transceiverRegistry[trackType];
354
+ if (transceiverForTrackType && transceiverForTrackType.sender) {
355
+ const sender = transceiverForTrackType.sender;
356
+ return (
357
+ !!sender.track &&
358
+ sender.track.readyState === 'live' &&
359
+ sender.track.enabled
360
+ );
361
+ }
362
+ return false;
363
+ };
364
+
365
+ /**
366
+ * Returns true if the given track type was ever published to the SFU.
367
+ * Contrary to `isPublishing`, this method returns true if a certain
368
+ * track type was published before, even if it is currently unpublished.
369
+ *
370
+ * @param trackType the track type to check.
371
+ */
372
+ hasEverPublished = (trackType: TrackType): boolean => {
373
+ return this.trackTypePublishHistory.get(trackType) ?? false;
374
+ };
375
+
376
+ /**
377
+ * Returns true if the given track type is currently live
378
+ *
379
+ * @param trackType the track type to check.
380
+ */
381
+ isLive = (trackType: TrackType): boolean => {
333
382
  const transceiverForTrackType = this.transceiverRegistry[trackType];
334
383
  if (transceiverForTrackType && transceiverForTrackType.sender) {
335
384
  const sender = transceiverForTrackType.sender;
@@ -180,6 +180,7 @@ describe('Publisher', () => {
180
180
 
181
181
  expect(state.localParticipant?.videoDeviceId).toEqual('test-device-id');
182
182
  expect(state.localParticipant?.publishedTracks).toContain(TrackType.VIDEO);
183
+ expect(track.enabled).toBe(true);
183
184
  expect(state.localParticipant?.videoStream).toEqual(mediaStream);
184
185
  expect(transceiver.setCodecPreferences).toHaveBeenCalled();
185
186
  expect(sfuClient.updateMuteState).toHaveBeenCalledWith(
package/src/rtc/codecs.ts CHANGED
@@ -1,5 +1,3 @@
1
- import { isReactNative } from '../helpers/platforms';
2
- import { removeCodec, setPreferredCodec } from '../helpers/sdp-munging';
3
1
  import { getLogger } from '../logger';
4
2
 
5
3
  export const getPreferredCodecs = (