@webex/plugin-meetings 3.0.0-beta.86 → 3.0.0-beta.88

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/dist/breakouts/breakout.js +1 -1
  2. package/dist/breakouts/index.js +13 -7
  3. package/dist/breakouts/index.js.map +1 -1
  4. package/dist/index.js +63 -1
  5. package/dist/index.js.map +1 -1
  6. package/dist/media/index.js.map +1 -1
  7. package/dist/media/properties.js.map +1 -1
  8. package/dist/meeting/index.js +139 -88
  9. package/dist/meeting/index.js.map +1 -1
  10. package/dist/meeting/muteState.js +169 -26
  11. package/dist/meeting/muteState.js.map +1 -1
  12. package/dist/meeting/util.js.map +1 -1
  13. package/dist/meetings/index.js +20 -2
  14. package/dist/meetings/index.js.map +1 -1
  15. package/dist/types/controls-options-manager/util.d.ts +0 -102
  16. package/dist/types/index.d.ts +6 -5
  17. package/dist/types/media/properties.d.ts +1 -1
  18. package/dist/types/meeting/index.d.ts +8 -5
  19. package/dist/types/meeting/muteState.d.ts +58 -5
  20. package/dist/types/meetings/index.d.ts +1 -0
  21. package/dist/types/multistream/remoteMedia.d.ts +1 -29
  22. package/dist/types/multistream/remoteMediaGroup.d.ts +0 -9
  23. package/package.json +19 -18
  24. package/src/breakouts/index.ts +11 -4
  25. package/src/{index.js → index.ts} +15 -0
  26. package/src/media/index.ts +2 -7
  27. package/src/media/properties.ts +3 -7
  28. package/src/meeting/index.ts +108 -51
  29. package/src/meeting/muteState.ts +158 -15
  30. package/src/meeting/util.ts +1 -1
  31. package/src/meetings/index.ts +14 -0
  32. package/test/integration/spec/converged-space-meetings.js +4 -3
  33. package/test/integration/spec/journey.js +4 -3
  34. package/test/integration/spec/space-meeting.js +4 -3
  35. package/test/unit/spec/breakouts/index.ts +72 -52
  36. package/test/unit/spec/meeting/index.js +24 -28
  37. package/test/unit/spec/meeting/muteState.js +226 -13
  38. package/test/utils/integrationTestUtils.js +64 -0
  39. package/test/utils/testUtils.js +0 -57
@@ -3753,15 +3753,19 @@ describe('plugin-meetings', () => {
3753
3753
  audioTrack = {
3754
3754
  id: 'audio track',
3755
3755
  getSettings: sinon.stub().returns({}),
3756
+ on: sinon.stub(),
3757
+ off: sinon.stub(),
3756
3758
  };
3757
3759
  videoTrack = {
3758
3760
  id: 'video track',
3759
3761
  getSettings: sinon.stub().returns({}),
3762
+ on: sinon.stub(),
3763
+ off: sinon.stub(),
3760
3764
  };
3761
3765
  videoShareTrack = {
3762
3766
  id: 'share track',
3763
3767
  on: sinon.stub(),
3764
- removeEventListener: sinon.stub(),
3768
+ off: sinon.stub(),
3765
3769
  getSettings: sinon.stub().returns({}),
3766
3770
  };
3767
3771
  meeting.requestScreenShareFloor = sinon.stub().resolves({});
@@ -3771,6 +3775,7 @@ describe('plugin-meetings', () => {
3771
3775
  sendVideo: false,
3772
3776
  sendShare: false,
3773
3777
  };
3778
+ meeting.isMultistream = true;
3774
3779
  meeting.mediaProperties.webrtcMediaConnection = {
3775
3780
  publishTrack: sinon.stub().resolves({}),
3776
3781
  unpublishTrack: sinon.stub().resolves({}),
@@ -3814,10 +3819,7 @@ describe('plugin-meetings', () => {
3814
3819
  await assert.isRejected(meeting.publishTracks({audio: {id: 'some audio track'}}));
3815
3820
  });
3816
3821
 
3817
- const checkAudioPublished = () => {
3818
- assert.calledWith(MediaUtil.createMediaStream, [audioTrack]);
3819
- assert.calledOnce(LocalMicrophoneTrackConstructorStub);
3820
-
3822
+ const checkAudioPublished = (track) => {
3821
3823
  assert.calledWith(
3822
3824
  createMuteStateStub,
3823
3825
  'audio',
@@ -3826,16 +3828,13 @@ describe('plugin-meetings', () => {
3826
3828
  );
3827
3829
  assert.calledWith(
3828
3830
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3829
- fakeLocalMicrophoneTrack
3831
+ track
3830
3832
  );
3831
- assert.equal(meeting.mediaProperties.audioTrack, fakeLocalMicrophoneTrack);
3833
+ assert.equal(meeting.mediaProperties.audioTrack, track);
3832
3834
  assert.equal(meeting.mediaProperties.mediaDirection.sendAudio, true);
3833
3835
  };
3834
3836
 
3835
- const checkVideoPublished = () => {
3836
- assert.calledWith(MediaUtil.createMediaStream, [videoTrack]);
3837
- assert.calledOnce(LocalCameraTrackConstructorStub);
3838
-
3837
+ const checkVideoPublished = (track) => {
3839
3838
  assert.calledWith(
3840
3839
  createMuteStateStub,
3841
3840
  'video',
@@ -3844,23 +3843,20 @@ describe('plugin-meetings', () => {
3844
3843
  );
3845
3844
  assert.calledWith(
3846
3845
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3847
- fakeLocalCameraTrack
3846
+ track
3848
3847
  );
3849
- assert.equal(meeting.mediaProperties.videoTrack, fakeLocalCameraTrack);
3848
+ assert.equal(meeting.mediaProperties.videoTrack, track);
3850
3849
  assert.equal(meeting.mediaProperties.mediaDirection.sendVideo, true);
3851
3850
  };
3852
3851
 
3853
- const checkScreenShareVideoPublished = () => {
3852
+ const checkScreenShareVideoPublished = (track) => {
3854
3853
  assert.calledOnce(meeting.requestScreenShareFloor);
3855
3854
 
3856
- assert.calledWith(MediaUtil.createMediaStream, [videoShareTrack]);
3857
- assert.calledOnce(LocalDisplayTrackConstructorStub);
3858
-
3859
3855
  assert.calledWith(
3860
3856
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3861
- fakeLocalDisplayTrack
3857
+ track
3862
3858
  );
3863
- assert.equal(meeting.mediaProperties.shareTrack, fakeLocalDisplayTrack);
3859
+ assert.equal(meeting.mediaProperties.shareTrack, track);
3864
3860
  assert.equal(meeting.mediaProperties.mediaDirection.sendShare, true);
3865
3861
  };
3866
3862
 
@@ -3868,7 +3864,7 @@ describe('plugin-meetings', () => {
3868
3864
  await meeting.publishTracks({screenShare: {video: videoShareTrack}});
3869
3865
 
3870
3866
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3871
- checkScreenShareVideoPublished();
3867
+ checkScreenShareVideoPublished(videoShareTrack);
3872
3868
  });
3873
3869
 
3874
3870
  it('creates MuteState instance and publishes the track for main audio', async () => {
@@ -3876,7 +3872,7 @@ describe('plugin-meetings', () => {
3876
3872
 
3877
3873
  assert.calledOnce(createMuteStateStub);
3878
3874
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3879
- checkAudioPublished();
3875
+ checkAudioPublished(audioTrack);
3880
3876
  });
3881
3877
 
3882
3878
  it('creates MuteState instance and publishes the track for main video', async () => {
@@ -3884,7 +3880,7 @@ describe('plugin-meetings', () => {
3884
3880
 
3885
3881
  assert.calledOnce(createMuteStateStub);
3886
3882
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3887
- checkVideoPublished();
3883
+ checkVideoPublished(videoTrack);
3888
3884
  });
3889
3885
 
3890
3886
  it('publishes audio, video and screen share together', async () => {
@@ -3898,9 +3894,9 @@ describe('plugin-meetings', () => {
3898
3894
 
3899
3895
  assert.calledTwice(createMuteStateStub);
3900
3896
  assert.calledThrice(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3901
- checkAudioPublished();
3902
- checkVideoPublished();
3903
- checkScreenShareVideoPublished();
3897
+ checkAudioPublished(audioTrack);
3898
+ checkVideoPublished(videoTrack);
3899
+ checkScreenShareVideoPublished(videoShareTrack);
3904
3900
  });
3905
3901
  });
3906
3902
 
@@ -3916,7 +3912,7 @@ describe('plugin-meetings', () => {
3916
3912
  const checkAudioUnpublished = () => {
3917
3913
  assert.calledWith(
3918
3914
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3919
- fakeLocalMicrophoneTrack
3915
+ audioTrack
3920
3916
  );
3921
3917
 
3922
3918
  assert.equal(meeting.mediaProperties.audioTrack, null);
@@ -3926,7 +3922,7 @@ describe('plugin-meetings', () => {
3926
3922
  const checkVideoUnpublished = () => {
3927
3923
  assert.calledWith(
3928
3924
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3929
- fakeLocalCameraTrack
3925
+ videoTrack
3930
3926
  );
3931
3927
 
3932
3928
  assert.equal(meeting.mediaProperties.videoTrack, null);
@@ -3936,7 +3932,7 @@ describe('plugin-meetings', () => {
3936
3932
  const checkScreenShareVideoUnpublished = () => {
3937
3933
  assert.calledWith(
3938
3934
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3939
- fakeLocalDisplayTrack
3935
+ videoShareTrack
3940
3936
  );
3941
3937
 
3942
3938
  assert.calledOnce(meeting.requestScreenShareFloor);
@@ -2,7 +2,6 @@ import sinon from 'sinon';
2
2
  import {assert} from '@webex/test-helper-chai';
3
3
  import MeetingUtil from '@webex/plugin-meetings/src/meeting/util';
4
4
  import {createMuteState} from '@webex/plugin-meetings/src/meeting/muteState';
5
- import Media from '@webex/plugin-meetings/src/media/index';
6
5
  import PermissionError from '@webex/plugin-meetings/src/common/errors/permission';
7
6
  import {AUDIO, VIDEO} from '@webex/plugin-meetings/src/constants';
8
7
 
@@ -16,11 +15,20 @@ describe('plugin-meetings', () => {
16
15
 
17
16
  const fakeLocus = {info: 'this is a fake locus'};
18
17
 
18
+ const createFakeLocalTrack = (id) => {
19
+ return {
20
+ id,
21
+ setMuted: sinon.stub(),
22
+ setServerMuted: sinon.stub(),
23
+ setUnmuteAllowed: sinon.stub(),
24
+ };
25
+ };
26
+
19
27
  beforeEach(() => {
20
28
  meeting = {
21
29
  mediaProperties: {
22
- audioTrack: {id: 'fake audio track', setMuted: sinon.stub()},
23
- videoTrack: {id: 'fake video track', setMuted: sinon.stub()},
30
+ audioTrack: createFakeLocalTrack('fake audio track'),
31
+ videoTrack: createFakeLocalTrack('fake video track'),
24
32
  },
25
33
  remoteMuted: false,
26
34
  unmuteAllowed: true,
@@ -35,8 +43,9 @@ describe('plugin-meetings', () => {
35
43
  muteMember: sinon.stub().resolves(),
36
44
  },
37
45
  };
38
- audio = createMuteState(AUDIO, meeting, {sendAudio: true});
39
- video = createMuteState(VIDEO, meeting, {sendVideo: true});
46
+
47
+ audio = createMuteState(AUDIO, meeting, {sendAudio: true}, true);
48
+ video = createMuteState(VIDEO, meeting, {sendVideo: true}, true);
40
49
 
41
50
  originalRemoteUpdateAudioVideo = MeetingUtil.remoteUpdateAudioVideo;
42
51
 
@@ -49,8 +58,8 @@ describe('plugin-meetings', () => {
49
58
 
50
59
  describe('mute state library', () => {
51
60
  it('does not create an audio instance if we are not sending audio', async () => {
52
- assert.isNull(createMuteState(AUDIO, meeting, {sendAudio: false}));
53
- assert.isNull(createMuteState(AUDIO, meeting, {}));
61
+ assert.isNull(createMuteState(AUDIO, meeting, {sendAudio: false}, true));
62
+ assert.isNull(createMuteState(AUDIO, meeting, {}, true));
54
63
  });
55
64
 
56
65
  it('does not create a video instance if we are not sending video', async () => {
@@ -94,7 +103,7 @@ describe('plugin-meetings', () => {
94
103
  assert.isFalse(audio.isMuted());
95
104
 
96
105
  // simulate remote mute
97
- audio.handleServerRemoteMuteUpdate(true, true);
106
+ audio.handleServerRemoteMuteUpdate(meeting, true, true);
98
107
 
99
108
  assert.isTrue(audio.isMuted());
100
109
  assert.isFalse(audio.isSelf());
@@ -268,7 +277,7 @@ describe('plugin-meetings', () => {
268
277
 
269
278
  it('does remote unmute when unmuting and remote mute is on', async () => {
270
279
  // simulate remote mute
271
- audio.handleServerRemoteMuteUpdate(true, true);
280
+ audio.handleServerRemoteMuteUpdate(meeting, true, true);
272
281
 
273
282
  // unmute
274
283
  await audio.handleClientRequest(meeting, false);
@@ -283,7 +292,7 @@ describe('plugin-meetings', () => {
283
292
 
284
293
  it('does video remote unmute when unmuting and remote mute is on', async () => {
285
294
  // simulate remote mute
286
- video.handleServerRemoteMuteUpdate(true, true);
295
+ video.handleServerRemoteMuteUpdate(meeting, true, true);
287
296
 
288
297
  // unmute
289
298
  await video.handleClientRequest(meeting, false);
@@ -298,7 +307,7 @@ describe('plugin-meetings', () => {
298
307
 
299
308
  it('does not video remote unmute when unmuting and remote mute is off', async () => {
300
309
  // simulate remote mute
301
- video.handleServerRemoteMuteUpdate(false, true);
310
+ video.handleServerRemoteMuteUpdate(meeting, false, true);
302
311
 
303
312
  // unmute
304
313
  await video.handleClientRequest(meeting, false);
@@ -348,7 +357,7 @@ describe('plugin-meetings', () => {
348
357
 
349
358
  it('rejects client request promise if server request for remote mute fails', async () => {
350
359
  // we only send remote mute requests when we're unmuting, so first we need to do a remote mute
351
- audio.handleServerRemoteMuteUpdate(true, true);
360
+ audio.handleServerRemoteMuteUpdate(meeting, true, true);
352
361
 
353
362
  // setup the stub to simulate server error response
354
363
  meeting.members.muteMember = sinon.stub().rejects();
@@ -433,7 +442,7 @@ describe('plugin-meetings', () => {
433
442
  });
434
443
 
435
444
  it('rejects client request to unmute if hard mute is used', (done) => {
436
- audio.handleServerRemoteMuteUpdate(true, false);
445
+ audio.handleServerRemoteMuteUpdate(meeting, true, false);
437
446
 
438
447
  audio
439
448
  .handleClientRequest(meeting, false)
@@ -500,3 +509,207 @@ describe('plugin-meetings', () => {
500
509
  });
501
510
  });
502
511
  });
512
+
513
+ describe('#init, #handleLocalTrackChange', () => {
514
+ let meeting;
515
+ let muteState;
516
+ let setServerMutedSpy;
517
+ let setMutedSpy, setUnmuteAllowedSpy;
518
+ const fakeLocus = {info: 'this is a fake locus'};
519
+
520
+ const createFakeLocalTrack = (id, muted) => {
521
+ return {
522
+ id,
523
+ setMuted: sinon.stub(),
524
+ setServerMuted: sinon.stub(),
525
+ setUnmuteAllowed: sinon.stub(),
526
+ muted,
527
+ };
528
+ };
529
+
530
+ const setup = (mediaType, remoteMuted = false, muted = false, defineTracks = true) => {
531
+
532
+ const remoteMuteField = mediaType === AUDIO ? 'remoteMuted' : 'remoteVideoMuted';
533
+
534
+ meeting = {
535
+ mediaProperties: {
536
+ audioTrack: defineTracks ? createFakeLocalTrack('fake audio track', muted) : undefined,
537
+ videoTrack: defineTracks ? createFakeLocalTrack('fake video track', muted) : undefined,
538
+ },
539
+ [remoteMuteField]: remoteMuted,
540
+ unmuteAllowed: true,
541
+ unmuteVideoAllowed: true,
542
+
543
+ locusInfo: {
544
+ onFullLocus: sinon.stub(),
545
+ },
546
+ members: {
547
+ selfId: 'fake self id',
548
+ muteMember: sinon.stub().resolves(),
549
+ },
550
+ };
551
+
552
+ const direction = mediaType === AUDIO ? {sendAudio: true} : {sendVideo: true};
553
+ sinon.spy(MeetingUtil, 'remoteUpdateAudioVideo');
554
+ muteState = createMuteState(mediaType, meeting, direction, false);
555
+ }
556
+
557
+ const setupSpies = (mediaType) => {
558
+ setUnmuteAllowedSpy = mediaType === AUDIO ? meeting.mediaProperties.audioTrack?.setUnmuteAllowed : meeting.mediaProperties.videoTrack?.setUnmuteAllowed;
559
+ setServerMutedSpy = mediaType === AUDIO ? meeting.mediaProperties.audioTrack?.setServerMuted : meeting.mediaProperties.videoTrack?.setServerMuted;
560
+ setMutedSpy = mediaType === AUDIO ? meeting.mediaProperties.audioTrack?.setMuted : meeting.mediaProperties.videoTrack?.setMuted;
561
+
562
+ clearSpies();
563
+ };
564
+
565
+ const clearSpies = () => {
566
+ setUnmuteAllowedSpy?.resetHistory();
567
+ setServerMutedSpy?.resetHistory();
568
+ setMutedSpy?.resetHistory();
569
+ };
570
+ const tests = [
571
+ {mediaType: AUDIO, title: 'audio'},
572
+ {mediaType: VIDEO, title: 'video'}
573
+ ];
574
+
575
+ tests.forEach(({mediaType, title}) =>
576
+ describe(title, () => {
577
+
578
+ afterEach(() => {
579
+ sinon.restore();
580
+ });
581
+
582
+ describe('#handleLocalTrackChange',() => {
583
+
584
+ it('calls init()', async () => {
585
+ setup(mediaType);
586
+ const spy = sinon.spy(muteState, 'init');
587
+ muteState.handleLocalTrackChange(meeting);
588
+ assert.calledOnceWithExactly(spy, meeting);
589
+ });
590
+ });
591
+
592
+ describe('#init', () => {
593
+
594
+ afterEach(() => {
595
+ sinon.restore();
596
+ });
597
+
598
+ it('nothing goes bad when track is undefined', async () => {
599
+ setup(mediaType, false, false, false);
600
+ setupSpies(mediaType);
601
+
602
+ muteState.init(meeting);
603
+
604
+ assert.isFalse(muteState.state.client.localMute);
605
+ });
606
+
607
+ it('tests when track muted is true and remoteMuted is false', async () => {
608
+ setup(mediaType, false, true);
609
+ setupSpies(mediaType);
610
+
611
+ muteState.init(meeting);
612
+
613
+ assert.calledWith(setUnmuteAllowedSpy, muteState.state.server.unmuteAllowed);
614
+ assert.notCalled(setServerMutedSpy);
615
+ assert.calledOnce(MeetingUtil.remoteUpdateAudioVideo);
616
+ assert.isTrue(muteState.state.client.localMute);
617
+ });
618
+
619
+
620
+ it('tests when track muted is false and remoteMuted is false', async () => {
621
+ setup(mediaType, false, false);
622
+ setupSpies(mediaType);
623
+ muteState.state.server.localMute = true;
624
+
625
+ muteState.init(meeting);
626
+
627
+ assert.calledWith(setUnmuteAllowedSpy, muteState.state.server.unmuteAllowed);
628
+ assert.notCalled(setServerMutedSpy);
629
+ assert.calledOnce(MeetingUtil.remoteUpdateAudioVideo);
630
+ assert.isFalse(muteState.state.client.localMute);
631
+ });
632
+
633
+ it('tests when remoteMuted is true', async () => {
634
+ // testing that muteLocalTrack is called
635
+ setup(mediaType, true);
636
+ setupSpies(mediaType);
637
+
638
+ muteState.init(meeting);
639
+
640
+ assert.calledWith(setUnmuteAllowedSpy, muteState.state.server.unmuteAllowed);
641
+ assert.calledOnceWithExactly(setServerMutedSpy, true, 'remotelyMuted');
642
+ });
643
+ });
644
+
645
+ describe('#handleLocalTrackMuteStateChange', () => {
646
+
647
+ afterEach(() => {
648
+ sinon.restore();
649
+ });
650
+
651
+ it('checks when ignoreMuteStateChange is true nothing changes', () => {
652
+ setup(mediaType, false, false);
653
+ muteState.ignoreMuteStateChange= true;
654
+
655
+ muteState.handleLocalTrackMuteStateChange(meeting, true);
656
+ assert.notCalled(MeetingUtil.remoteUpdateAudioVideo);
657
+
658
+ assert.isFalse(muteState.state.client.localMute);
659
+ });
660
+
661
+ it('tests localMute - true to false', () => {
662
+ setup(mediaType, false, true);
663
+
664
+ muteState.handleLocalTrackMuteStateChange(meeting, false);
665
+ assert.equal(muteState.state.client.localMute, false);
666
+ assert.called(MeetingUtil.remoteUpdateAudioVideo);
667
+ });
668
+
669
+ it('tests localMute - false to true', () => {
670
+ setup(mediaType, false, false);
671
+
672
+ muteState.handleLocalTrackMuteStateChange(meeting, true);
673
+ assert.equal(muteState.state.client.localMute, true);
674
+ assert.called(MeetingUtil.remoteUpdateAudioVideo);
675
+ });
676
+ });
677
+
678
+ describe('#applyClientStateLocally', () => {
679
+
680
+ afterEach(() => {
681
+ sinon.restore();
682
+ });
683
+
684
+ it('checks when sdkOwnsLocalTrack is false', () => {
685
+ setup(mediaType);
686
+ setupSpies(mediaType);
687
+ muteState.sdkOwnsLocalTrack= false;
688
+
689
+ muteState.applyClientStateLocally(meeting, 'somereason');
690
+ assert.calledOnceWithExactly(setServerMutedSpy, muteState.state.client.localMute, 'somereason');
691
+ assert.notCalled(setMutedSpy);
692
+ });
693
+
694
+ it('checks when sdkOwnsLocalTrack is true', () => {
695
+ setup(mediaType);
696
+ setupSpies(mediaType);
697
+ muteState.sdkOwnsLocalTrack= true;
698
+
699
+ muteState.applyClientStateLocally(meeting, 'somereason');
700
+ assert.notCalled(setServerMutedSpy);
701
+ assert.calledOnceWithExactly(setMutedSpy, muteState.state.client.localMute);
702
+ });
703
+
704
+ it('checks nothing explodes when tracks are undefined', () => {
705
+ setup(mediaType, false, false, false);
706
+ setupSpies(mediaType);
707
+ muteState.sdkOwnsLocalTrack= true;
708
+
709
+ muteState.applyClientStateLocally(meeting, 'somereason');
710
+ });
711
+ });
712
+
713
+ })
714
+ );
715
+ });
@@ -0,0 +1,64 @@
1
+ import {assert} from '@webex/test-helper-chai';
2
+ import {Defer} from '@webex/common';
3
+ import {LocalCameraTrack, LocalMicrophoneTrack} from '@webex/plugin-meetings';
4
+
5
+ const addMedia = async (user, options = {}) => {
6
+ const [localStream, localShare] = await user.meeting
7
+ .getMediaStreams({
8
+ sendAudio: true,
9
+ sendVideo: true,
10
+ sendShare: false,
11
+ });
12
+
13
+ if (options.multistream) {
14
+ await user.meeting.addMedia({});
15
+ await user.meeting.publishTracks({microphone: new LocalMicrophoneTrack(new MediaStream([localStream.getAudioTracks()?.[0]])), camera: new LocalCameraTrack(new MediaStream([localStream.getVideoTracks()?.[0]]))});
16
+
17
+ } else {
18
+ const mediaReadyPromises = Array.isArray(options.expectedMediaReadyTypes)
19
+ ? options.expectedMediaReadyTypes.reduce((output, expectedMediaReadyType) => {
20
+ if (typeof expectedMediaReadyType !== 'string') {
21
+ return output;
22
+ }
23
+
24
+ output[expectedMediaReadyType] = new Defer();
25
+
26
+ return output;
27
+ }, {})
28
+ : {local: new Defer(), remoteAudio: new Defer(), remoteVideo: new Defer()};
29
+
30
+ const mediaReady = (media) => {
31
+ if (!media) {
32
+ return;
33
+ }
34
+ if (mediaReadyPromises[media.type]) {
35
+ mediaReadyPromises[media.type].resolve();
36
+ }
37
+ };
38
+
39
+ user.meeting.on('media:ready', mediaReady);
40
+
41
+ await user.meeting.addMedia({
42
+ mediaSettings: {
43
+ sendAudio: true,
44
+ sendVideo: true,
45
+ sendShare: false,
46
+ receiveShare: true,
47
+ receiveAudio: true,
48
+ receiveVideo: true,
49
+ },
50
+ localShare,
51
+ localStream,
52
+ });
53
+ await Promise.all(Object.values(mediaReadyPromises).map((defer) => defer.promise));
54
+ };
55
+
56
+
57
+ assert.exists(user.meeting.mediaProperties.audioTrack, 'audioTrack not present');
58
+ assert.exists(user.meeting.mediaProperties.videoTrack, 'videoTrack not present');
59
+
60
+ };
61
+
62
+ export default {
63
+ addMedia
64
+ };
@@ -1,5 +1,3 @@
1
- import {assert} from '@webex/test-helper-chai';
2
- import {Defer} from '@webex/common';
3
1
 
4
2
  const max = 30000;
5
3
  const waitForSpy = (spy, event) => {
@@ -195,61 +193,7 @@ const delayedTest = (callback, timeout) =>
195
193
  }, timeout);
196
194
  });
197
195
 
198
- const addMedia = async (user, options = {}) => {
199
- const mediaReadyPromises = Array.isArray(options.expectedMediaReadyTypes)
200
- ? options.expectedMediaReadyTypes.reduce((output, expectedMediaReadyType) => {
201
- if (typeof expectedMediaReadyType !== 'string') {
202
- return output;
203
- }
204
-
205
- output[expectedMediaReadyType] = new Defer();
206
-
207
- return output;
208
- }, {})
209
- : {local: new Defer(), remoteAudio: new Defer(), remoteVideo: new Defer()};
210
-
211
- const mediaReady = (media) => {
212
- if (!media) {
213
- return;
214
- }
215
- if (mediaReadyPromises[media.type]) {
216
- mediaReadyPromises[media.type].resolve();
217
- }
218
- };
219
-
220
- user.meeting.on('media:ready', mediaReady);
221
196
 
222
- const [localStream, localShare] = await user.meeting
223
- .getMediaStreams({
224
- sendAudio: true,
225
- sendVideo: true,
226
- sendShare: false,
227
- });
228
-
229
- if (options.multistream) {
230
- await user.meeting.addMedia({});
231
-
232
- await user.meeting.publishTracks({microphone: localStream.getAudioTracks()[0], camera: localStream.getVideoTracks()[0]})
233
- } else {
234
- await user.meeting.addMedia({
235
- mediaSettings: {
236
- sendAudio: true,
237
- sendVideo: true,
238
- sendShare: false,
239
- receiveShare: true,
240
- receiveAudio: true,
241
- receiveVideo: true,
242
- },
243
- localShare,
244
- localStream,
245
- });
246
- }
247
-
248
- await Promise.all(Object.values(mediaReadyPromises).map((defer) => defer.promise));
249
-
250
- assert.exists(user.meeting.mediaProperties.audioTrack, 'audioTrack not present');
251
- assert.exists(user.meeting.mediaProperties.videoTrack, 'videoTrack not present');
252
- };
253
197
 
254
198
  const waitUntil = (waitTime) =>
255
199
  new Promise((resolve) => {
@@ -291,7 +235,6 @@ export default {
291
235
  waitForEvents,
292
236
  checkParticipantUpdatedStatus,
293
237
  delayedPromise,
294
- addMedia,
295
238
  waitUntil,
296
239
  delayedTest,
297
240
  flushPromises,