@webex/plugin-meetings 3.0.0-beta.397 → 3.0.0-beta.399

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -150,15 +150,30 @@ export class MuteState {
150
150
  * @param {Boolean} [mute] true for muting, false for unmuting request
151
151
  * @returns {void}
152
152
  */
153
- public handleLocalStreamMuteStateChange(meeting?: object, mute?: boolean) {
153
+ public handleLocalStreamMuteStateChange(meeting?: any) {
154
154
  if (this.ignoreMuteStateChange) {
155
155
  return;
156
156
  }
157
+
158
+ // either user or system may have triggered a mute state change, but localMute should reflect both
159
+ let newMuteState: boolean;
160
+ let userMuteState: boolean;
161
+ let systemMuteState: boolean;
162
+ if (this.type === AUDIO) {
163
+ newMuteState = meeting.mediaProperties.audioStream?.muted;
164
+ userMuteState = meeting.mediaProperties.audioStream?.userMuted;
165
+ systemMuteState = meeting.mediaProperties.audioStream?.systemMuted;
166
+ } else {
167
+ newMuteState = meeting.mediaProperties.videoStream?.muted;
168
+ userMuteState = meeting.mediaProperties.videoStream?.userMuted;
169
+ systemMuteState = meeting.mediaProperties.videoStream?.systemMuted;
170
+ }
171
+
157
172
  LoggerProxy.logger.info(
158
- `Meeting:muteState#handleLocalStreamMuteStateChange --> ${this.type}: local stream new mute state: ${mute}`
173
+ `Meeting:muteState#handleLocalStreamMuteStateChange --> ${this.type}: local stream new mute state: ${newMuteState} (user mute: ${userMuteState}, system mute: ${systemMuteState})`
159
174
  );
160
175
 
161
- this.state.client.localMute = mute;
176
+ this.state.client.localMute = newMuteState;
162
177
 
163
178
  this.applyClientStateToServer(meeting);
164
179
  }
@@ -249,7 +264,12 @@ export class MuteState {
249
264
  `Meeting:muteState#applyClientStateToServer --> ${this.type}: error: ${e}`
250
265
  );
251
266
 
252
- this.applyServerMuteToLocalStream(meeting, 'clientRequestFailed');
267
+ // failed to apply client state to server, so revert stream mute state to server state
268
+ this.muteLocalStream(
269
+ meeting,
270
+ this.state.server.localMute || this.state.server.remoteMute,
271
+ 'clientRequestFailed'
272
+ );
253
273
  });
254
274
  }
255
275
 
@@ -325,18 +345,6 @@ export class MuteState {
325
345
  });
326
346
  }
327
347
 
328
- /** Sets the mute state of the local stream according to what server thinks is our state
329
- * @param {Object} meeting - the meeting object
330
- * @param {ServerMuteReason} serverMuteReason - reason why we're applying server mute to the local stream
331
- * @returns {void}
332
- */
333
- private applyServerMuteToLocalStream(meeting: any, serverMuteReason: ServerMuteReason) {
334
- const muted = this.state.server.localMute || this.state.server.remoteMute;
335
-
336
- // update the local stream mute state, but not this.state.client.localMute
337
- this.muteLocalStream(meeting, muted, serverMuteReason);
338
- }
339
-
340
348
  /** Applies the current value for unmute allowed to the underlying stream
341
349
  *
342
350
  * @param {Meeting} meeting
@@ -371,7 +379,7 @@ export class MuteState {
371
379
  }
372
380
  if (muted !== undefined) {
373
381
  this.state.server.remoteMute = muted;
374
- this.applyServerMuteToLocalStream(meeting, 'remotelyMuted');
382
+ this.muteLocalStream(meeting, muted, 'remotelyMuted');
375
383
  }
376
384
  }
377
385
 
@@ -383,7 +391,7 @@ export class MuteState {
383
391
  * @param {Object} [meeting] the meeting object
384
392
  * @returns {undefined}
385
393
  */
386
- public handleServerLocalUnmuteRequired(meeting?: object) {
394
+ public handleServerLocalUnmuteRequired(meeting?: any) {
387
395
  if (!this.state.client.enabled) {
388
396
  LoggerProxy.logger.warn(
389
397
  `Meeting:muteState#handleServerLocalUnmuteRequired --> ${this.type}: localAudioUnmuteRequired received while ${this.type} is disabled -> local unmute will not result in ${this.type} being sent`
@@ -396,9 +404,15 @@ export class MuteState {
396
404
 
397
405
  // todo: I'm seeing "you can now unmute yourself " popup when this happens - but same thing happens on web.w.c so we can ignore for now
398
406
  this.state.server.remoteMute = false;
399
- this.state.client.localMute = false;
400
407
 
401
- this.applyClientStateLocally(meeting, 'localUnmuteRequired');
408
+ // change user mute state to false, but keep localMute true if overall mute state is still true
409
+ this.muteLocalStream(meeting, false, 'localUnmuteRequired');
410
+ if (this.type === AUDIO) {
411
+ this.state.client.localMute = meeting.mediaProperties.audioStream?.muted;
412
+ } else {
413
+ this.state.client.localMute = meeting.mediaProperties.videoStream?.muted;
414
+ }
415
+
402
416
  this.applyClientStateToServer(meeting);
403
417
  }
404
418
 
@@ -432,11 +432,11 @@ skipInNode(describe)('plugin-meetings', () => {
432
432
  {scope: bob.meeting.members, event: 'members:update', match: checkEvent},
433
433
  ]);
434
434
 
435
- localStreams.alice.microphone.setMuted(true);
435
+ localStreams.alice.microphone.setUserMuted(true);
436
436
 
437
437
  await membersUpdate;
438
438
 
439
- assert.equal(localStreams.alice.microphone.muted, true);
439
+ assert.equal(localStreams.alice.microphone.userMuted, true);
440
440
  });
441
441
 
442
442
  it('alice Audio unMute ', async () => {
@@ -451,11 +451,11 @@ skipInNode(describe)('plugin-meetings', () => {
451
451
  {scope: bob.meeting.members, event: 'members:update', match: checkEvent},
452
452
  ]);
453
453
 
454
- localStreams.alice.microphone.setMuted(false);
454
+ localStreams.alice.microphone.setUserMuted(false);
455
455
 
456
456
  await membersUpdate;
457
457
 
458
- assert.equal(localStreams.alice.microphone.muted, false);
458
+ assert.equal(localStreams.alice.microphone.userMuted, false);
459
459
  });
460
460
 
461
461
  it('alice video mute', async () => {
@@ -470,11 +470,11 @@ skipInNode(describe)('plugin-meetings', () => {
470
470
  {scope: bob.meeting.members, event: 'members:update', match: checkEvent},
471
471
  ]);
472
472
 
473
- localStreams.alice.camera.setMuted(true);
473
+ localStreams.alice.camera.setUserMuted(true);
474
474
 
475
475
  await membersUpdate;
476
476
 
477
- assert.equal(localStreams.alice.camera.muted, true);
477
+ assert.equal(localStreams.alice.camera.userMuted, true);
478
478
  });
479
479
 
480
480
  it('alice video unmute', async () => {
@@ -489,11 +489,11 @@ skipInNode(describe)('plugin-meetings', () => {
489
489
  {scope: bob.meeting.members, event: 'members:update', match: checkEvent},
490
490
  ]);
491
491
 
492
- localStreams.alice.camera.setMuted(false);
492
+ localStreams.alice.camera.setUserMuted(false);
493
493
 
494
494
  await membersUpdate;
495
495
 
496
- assert.equal(localStreams.alice.camera.muted, false);
496
+ assert.equal(localStreams.alice.camera.userMuted, false);
497
497
  });
498
498
 
499
499
  it('alice update Audio', async () => {
@@ -574,11 +574,11 @@ skipInNode(describe)('plugin-meetings', () => {
574
574
  ]);
575
575
 
576
576
  // first bob mutes himself
577
- localStreams.bob.microphone.setMuted(true);
577
+ localStreams.bob.microphone.setUserMuted(true);
578
578
 
579
579
  await membersUpdate;
580
580
 
581
- assert.equal(localStreams.bob.microphone.muted, true);
581
+ assert.equal(localStreams.bob.microphone.userMuted, true);
582
582
 
583
583
  // now alice tries to unmute bob
584
584
  await testUtils.delayedPromise(alice.meeting.mute(bob.meeting.members.selfId, false))
@@ -593,7 +593,7 @@ skipInNode(describe)('plugin-meetings', () => {
593
593
  assert.fail('bob received unexpected meeting:self:unmutedByOthers event');
594
594
  })
595
595
  .catch(() => {
596
- assert.equal(localStreams.bob.microphone.muted, true);
596
+ assert.equal(localStreams.bob.microphone.userMuted, true);
597
597
  });
598
598
  });
599
599
 
@@ -607,11 +607,11 @@ skipInNode(describe)('plugin-meetings', () => {
607
607
  {scope: alice.meeting.members, event: 'members:update', match: checkEvent},
608
608
  ]);
609
609
 
610
- localStreams.bob.microphone.setMuted(false);
610
+ localStreams.bob.microphone.setUserMuted(false);
611
611
 
612
612
  await membersUpdate;
613
613
 
614
- assert.equal(localStreams.bob.microphone.muted, false);
614
+ assert.equal(localStreams.bob.microphone.userMuted, false);
615
615
  });
616
616
 
617
617
  it('alice shares the screen with highFrameRate', async () => {
@@ -43,7 +43,7 @@ import {
43
43
  RemoteTrackType,
44
44
  MediaType,
45
45
  } from '@webex/internal-media-core';
46
- import {StreamEventNames} from '@webex/media-helpers';
46
+ import {LocalStreamEventNames} from '@webex/media-helpers';
47
47
  import * as StatsAnalyzerModule from '@webex/plugin-meetings/src/statsAnalyzer';
48
48
  import EventsScope from '@webex/plugin-meetings/src/common/events/events-scope';
49
49
  import Meetings, {CONSTANTS} from '@webex/plugin-meetings';
@@ -99,7 +99,6 @@ import {
99
99
  MeetingInfoV2PolicyError,
100
100
  } from '../../../../src/meeting-info/meeting-info-v2';
101
101
  import {
102
- CLIENT_ERROR_CODE_TO_ERROR_PAYLOAD,
103
102
  DTLS_HANDSHAKE_FAILED_CLIENT_CODE,
104
103
  ICE_FAILED_WITHOUT_TURN_TLS_CLIENT_CODE,
105
104
  ICE_FAILED_WITH_TURN_TLS_CLIENT_CODE,
@@ -1548,7 +1547,6 @@ describe('plugin-meetings', () => {
1548
1547
  describe('#addMedia', () => {
1549
1548
  const muteStateStub = {
1550
1549
  handleClientRequest: sinon.stub().returns(Promise.resolve(true)),
1551
- applyClientStateLocally: sinon.stub().returns(Promise.resolve(true)),
1552
1550
  };
1553
1551
 
1554
1552
  let fakeMediaConnection;
@@ -3028,9 +3026,13 @@ describe('plugin-meetings', () => {
3028
3026
  getSettings: sinon.stub().returns({
3029
3027
  deviceId: 'some device id',
3030
3028
  }),
3031
- muted: false,
3029
+ userMuted: false,
3030
+ systemMuted: false,
3031
+ get muted() {
3032
+ return this.userMuted || this.systemMuted;
3033
+ },
3032
3034
  setUnmuteAllowed: sinon.stub(),
3033
- setMuted: sinon.stub(),
3035
+ setUserMuted: sinon.stub(),
3034
3036
  setServerMuted: sinon.stub(),
3035
3037
  outputStream: {
3036
3038
  getTracks: () => {
@@ -3265,28 +3267,50 @@ describe('plugin-meetings', () => {
3265
3267
  if (stream !== undefined) {
3266
3268
  switch (type) {
3267
3269
  case 'audio':
3268
- assert.calledOnceWithExactly(
3269
- meeting.sendSlotManager.getSlot(MediaType.AudioMain).publishStream,
3270
- stream
3271
- );
3270
+ if (stream?.readyState === 'ended') {
3271
+ assert.notCalled(meeting.sendSlotManager.getSlot(MediaType.AudioMain).publishStream);
3272
+ } else {
3273
+ assert.calledOnceWithExactly(
3274
+ meeting.sendSlotManager.getSlot(MediaType.AudioMain).publishStream,
3275
+ stream
3276
+ );
3277
+ }
3272
3278
  break;
3273
3279
  case 'video':
3274
- assert.calledOnceWithExactly(
3275
- meeting.sendSlotManager.getSlot(MediaType.VideoMain).publishStream,
3276
- stream
3277
- );
3280
+ if (stream?.readyState === 'ended') {
3281
+ assert.notCalled(
3282
+ meeting.sendSlotManager.getSlot(MediaType.VideoMain).publishStream
3283
+ );
3284
+ } else {
3285
+ assert.calledOnceWithExactly(
3286
+ meeting.sendSlotManager.getSlot(MediaType.VideoMain).publishStream,
3287
+ stream
3288
+ );
3289
+ }
3278
3290
  break;
3279
3291
  case 'screenShareAudio':
3280
- assert.calledOnceWithExactly(
3281
- meeting.sendSlotManager.getSlot(MediaType.AudioSlides).publishStream,
3282
- stream
3283
- );
3292
+ if (stream?.readyState === 'ended') {
3293
+ assert.notCalled(
3294
+ meeting.sendSlotManager.getSlot(MediaType.AudioSlides).publishStream
3295
+ );
3296
+ } else {
3297
+ assert.calledOnceWithExactly(
3298
+ meeting.sendSlotManager.getSlot(MediaType.AudioSlides).publishStream,
3299
+ stream
3300
+ );
3301
+ }
3284
3302
  break;
3285
3303
  case 'screenShareVideo':
3286
- assert.calledOnceWithExactly(
3287
- meeting.sendSlotManager.getSlot(MediaType.VideoSlides).publishStream,
3288
- stream
3289
- );
3304
+ if (stream?.readyState === 'ended') {
3305
+ assert.notCalled(
3306
+ meeting.sendSlotManager.getSlot(MediaType.VideoSlides).publishStream
3307
+ );
3308
+ } else {
3309
+ assert.calledOnceWithExactly(
3310
+ meeting.sendSlotManager.getSlot(MediaType.VideoSlides).publishStream,
3311
+ stream
3312
+ );
3313
+ }
3290
3314
  break;
3291
3315
  }
3292
3316
  }
@@ -3314,7 +3338,7 @@ describe('plugin-meetings', () => {
3314
3338
  }
3315
3339
  };
3316
3340
 
3317
- it('addMedia() works correctly when media is enabled without tracks to publish', async () => {
3341
+ it('addMedia() works correctly when media is enabled without streams to publish', async () => {
3318
3342
  await meeting.addMedia();
3319
3343
  await simulateRoapOffer();
3320
3344
  await simulateRoapOk();
@@ -3380,8 +3404,76 @@ describe('plugin-meetings', () => {
3380
3404
  assert.calledTwice(locusMediaRequestStub);
3381
3405
  });
3382
3406
 
3383
- it('addMedia() works correctly when media is enabled with tracks to publish and track is muted', async () => {
3384
- fakeMicrophoneStream.muted = true;
3407
+ it('addMedia() works correctly when media is enabled with streams to publish and stream is user muted', async () => {
3408
+ fakeMicrophoneStream.userMuted = true;
3409
+
3410
+ await meeting.addMedia({localStreams: {microphone: fakeMicrophoneStream}});
3411
+ await simulateRoapOffer();
3412
+ await simulateRoapOk();
3413
+
3414
+ // check RoapMediaConnection was created correctly
3415
+ checkMediaConnectionCreated({
3416
+ mediaConnectionConfig: expectedMediaConnectionConfig,
3417
+ localStreams: {
3418
+ audio: fakeMicrophoneStream,
3419
+ video: undefined,
3420
+ screenShareVideo: undefined,
3421
+ screenShareAudio: undefined,
3422
+ },
3423
+ direction: {
3424
+ audio: 'sendrecv',
3425
+ video: 'sendrecv',
3426
+ screenShare: 'recvonly',
3427
+ },
3428
+ remoteQualityLevel: 'HIGH',
3429
+ expectedDebugId,
3430
+ meetingId: meeting.id,
3431
+ });
3432
+ // and SDP offer was sent with the right audioMuted/videoMuted values
3433
+ checkSdpOfferSent({audioMuted: true, videoMuted: true});
3434
+ // check OK was sent with the right audioMuted/videoMuted values
3435
+ checkOkSent({audioMuted: true, videoMuted: true});
3436
+
3437
+ // and that these were the only /media requests that were sent
3438
+ assert.calledTwice(locusMediaRequestStub);
3439
+ });
3440
+
3441
+ it('addMedia() works correctly when media is enabled with tracks to publish and track is ended', async () => {
3442
+ fakeMicrophoneStream.readyState = 'ended';
3443
+
3444
+ await meeting.addMedia({localStreams: {microphone: fakeMicrophoneStream}});
3445
+ await simulateRoapOffer();
3446
+ await simulateRoapOk();
3447
+
3448
+ // check RoapMediaConnection was created correctly
3449
+ checkMediaConnectionCreated({
3450
+ mediaConnectionConfig: expectedMediaConnectionConfig,
3451
+ localStreams: {
3452
+ audio: undefined,
3453
+ video: undefined,
3454
+ screenShareVideo: undefined,
3455
+ screenShareAudio: undefined,
3456
+ },
3457
+ direction: {
3458
+ audio: 'sendrecv',
3459
+ video: 'sendrecv',
3460
+ screenShare: 'recvonly',
3461
+ },
3462
+ remoteQualityLevel: 'HIGH',
3463
+ expectedDebugId,
3464
+ meetingId: meeting.id,
3465
+ });
3466
+ // and SDP offer was sent with the right audioMuted/videoMuted values
3467
+ checkSdpOfferSent({audioMuted: true, videoMuted: true});
3468
+ // check OK was sent with the right audioMuted/videoMuted values
3469
+ checkOkSent({audioMuted: true, videoMuted: true});
3470
+
3471
+ // and that these were the only /media requests that were sent
3472
+ assert.calledTwice(locusMediaRequestStub);
3473
+ });
3474
+
3475
+ it('addMedia() works correctly when media is enabled with streams to publish and stream is system muted', async () => {
3476
+ fakeMicrophoneStream.systemMuted = true;
3385
3477
 
3386
3478
  await meeting.addMedia({localStreams: {microphone: fakeMicrophoneStream}});
3387
3479
  await simulateRoapOffer();
@@ -3414,7 +3506,7 @@ describe('plugin-meetings', () => {
3414
3506
  assert.calledTwice(locusMediaRequestStub);
3415
3507
  });
3416
3508
 
3417
- it('addMedia() works correctly when media is disabled with tracks to publish', async () => {
3509
+ it('addMedia() works correctly when media is disabled with streams to publish', async () => {
3418
3510
  await meeting.addMedia({
3419
3511
  localStreams: {microphone: fakeMicrophoneStream},
3420
3512
  audioEnabled: false,
@@ -3450,7 +3542,7 @@ describe('plugin-meetings', () => {
3450
3542
  assert.calledTwice(locusMediaRequestStub);
3451
3543
  });
3452
3544
 
3453
- it('addMedia() works correctly when media is disabled with no tracks to publish', async () => {
3545
+ it('addMedia() works correctly when media is disabled with no streams to publish', async () => {
3454
3546
  await meeting.addMedia({audioEnabled: false});
3455
3547
  await simulateRoapOffer();
3456
3548
  await simulateRoapOk();
@@ -3483,7 +3575,7 @@ describe('plugin-meetings', () => {
3483
3575
  assert.calledTwice(locusMediaRequestStub);
3484
3576
  });
3485
3577
 
3486
- it('addMedia() works correctly when video is disabled with no tracks to publish', async () => {
3578
+ it('addMedia() works correctly when video is disabled with no streams to publish', async () => {
3487
3579
  await meeting.addMedia({videoEnabled: false});
3488
3580
  await simulateRoapOffer();
3489
3581
  await simulateRoapOk();
@@ -3516,7 +3608,7 @@ describe('plugin-meetings', () => {
3516
3608
  assert.calledTwice(locusMediaRequestStub);
3517
3609
  });
3518
3610
 
3519
- it('addMedia() works correctly when screen share is disabled with no tracks to publish', async () => {
3611
+ it('addMedia() works correctly when screen share is disabled with no streams to publish', async () => {
3520
3612
  await meeting.addMedia({shareAudioEnabled: false, shareVideoEnabled: false});
3521
3613
  await simulateRoapOffer();
3522
3614
  await simulateRoapOk();
@@ -3617,9 +3709,13 @@ describe('plugin-meetings', () => {
3617
3709
  const fakeMicrophoneStream2 = {
3618
3710
  on: sinon.stub(),
3619
3711
  off: sinon.stub(),
3620
- muted: false,
3712
+ userMuted: false,
3713
+ systemMuted: false,
3714
+ get muted() {
3715
+ return this.userMuted || this.systemMuted;
3716
+ },
3621
3717
  setUnmuteAllowed: sinon.stub(),
3622
- setMuted: sinon.stub(),
3718
+ setUserMuted: sinon.stub(),
3623
3719
  outputStream: {
3624
3720
  getTracks: () => {
3625
3721
  return [
@@ -3856,12 +3952,55 @@ describe('plugin-meetings', () => {
3856
3952
  });
3857
3953
 
3858
3954
  [
3859
- {mute: true, title: 'muting a track before confluence is created'},
3860
- {mute: false, title: 'unmuting a track before confluence is created'},
3955
+ {mute: true, title: 'user muting a track before confluence is created'},
3956
+ {mute: false, title: 'user unmuting a track before confluence is created'},
3957
+ ].forEach(({mute, title}) =>
3958
+ it(title, async () => {
3959
+ // initialize the microphone mute state to opposite of what we do in the test
3960
+ fakeMicrophoneStream.userMuted = !mute;
3961
+
3962
+ await meeting.addMedia({localStreams: {microphone: fakeMicrophoneStream}});
3963
+ await stableState();
3964
+
3965
+ resetHistory();
3966
+
3967
+ assert.equal(
3968
+ fakeMicrophoneStream.on.getCall(0).args[0],
3969
+ LocalStreamEventNames.UserMuteStateChange
3970
+ );
3971
+ const mutedListener = fakeMicrophoneStream.on.getCall(0).args[1];
3972
+ // simulate track being muted
3973
+ fakeMicrophoneStream.userMuted = mute;
3974
+ mutedListener(mute);
3975
+
3976
+ await stableState();
3977
+
3978
+ // nothing should happen
3979
+ assert.notCalled(locusMediaRequestStub);
3980
+ assert.notCalled(fakeRoapMediaConnection.update);
3981
+
3982
+ // now simulate roap offer and ok
3983
+ await simulateRoapOffer();
3984
+ await simulateRoapOk();
3985
+
3986
+ // it should be sent with the right mute status
3987
+ checkSdpOfferSent({audioMuted: mute, videoMuted: true});
3988
+ // check OK was sent with the right audioMuted/videoMuted values
3989
+ checkOkSent({audioMuted: mute, videoMuted: true});
3990
+
3991
+ // nothing else should happen
3992
+ assert.calledTwice(locusMediaRequestStub);
3993
+ assert.notCalled(fakeRoapMediaConnection.update);
3994
+ })
3995
+ );
3996
+
3997
+ [
3998
+ {mute: true, title: 'system muting a track before confluence is created'},
3999
+ {mute: false, title: 'system unmuting a track before confluence is created'},
3861
4000
  ].forEach(({mute, title}) =>
3862
4001
  it(title, async () => {
3863
4002
  // initialize the microphone mute state to opposite of what we do in the test
3864
- fakeMicrophoneStream.muted = !mute;
4003
+ fakeMicrophoneStream.systemMuted = !mute;
3865
4004
 
3866
4005
  await meeting.addMedia({localStreams: {microphone: fakeMicrophoneStream}});
3867
4006
  await stableState();
@@ -3870,10 +4009,11 @@ describe('plugin-meetings', () => {
3870
4009
 
3871
4010
  assert.equal(
3872
4011
  fakeMicrophoneStream.on.getCall(0).args[0],
3873
- StreamEventNames.MuteStateChange
4012
+ LocalStreamEventNames.UserMuteStateChange
3874
4013
  );
3875
4014
  const mutedListener = fakeMicrophoneStream.on.getCall(0).args[1];
3876
4015
  // simulate track being muted
4016
+ fakeMicrophoneStream.systemMuted = mute;
3877
4017
  mutedListener(mute);
3878
4018
 
3879
4019
  await stableState();
@@ -6274,6 +6414,65 @@ describe('plugin-meetings', () => {
6274
6414
  checkScreenShareVideoPublished(videoShareStream);
6275
6415
  checkScreenShareAudioPublished(audioShareStream);
6276
6416
  });
6417
+
6418
+ [
6419
+ {
6420
+ endedStream: 'microphone',
6421
+ streams: {
6422
+ microphone: {
6423
+ readyState: 'ended',
6424
+ },
6425
+ camera: undefined,
6426
+ screenShare: {
6427
+ audio: undefined,
6428
+ video: undefined,
6429
+ },
6430
+ },
6431
+ },
6432
+ {
6433
+ endedStream: 'camera',
6434
+ streams: {
6435
+ microphone: undefined,
6436
+ camera: {
6437
+ readyState: 'ended',
6438
+ },
6439
+ screenShare: {
6440
+ audio: undefined,
6441
+ video: undefined,
6442
+ },
6443
+ },
6444
+ },
6445
+ {
6446
+ endedStream: 'screenShare audio',
6447
+ streams: {
6448
+ microphone: undefined,
6449
+ camera: undefined,
6450
+ screenShare: {
6451
+ audio: {
6452
+ readyState: 'ended',
6453
+ },
6454
+ video: undefined,
6455
+ },
6456
+ },
6457
+ },
6458
+ {
6459
+ endedStream: 'screenShare video',
6460
+ streams: {
6461
+ microphone: undefined,
6462
+ camera: undefined,
6463
+ screenShare: {
6464
+ audio: undefined,
6465
+ video: {
6466
+ readyState: 'ended',
6467
+ },
6468
+ },
6469
+ },
6470
+ },
6471
+ ].forEach(({endedStream, streams}) => {
6472
+ it(`throws error if readyState of ${endedStream} is ended`, async () => {
6473
+ assert.isRejected(meeting.publishStreams(streams));
6474
+ })
6475
+ });
6277
6476
  });
6278
6477
 
6279
6478
  describe('unpublishStreams', () => {
@@ -6408,11 +6607,11 @@ describe('plugin-meetings', () => {
6408
6607
  meeting.sendSlotManager.setNamedMediaGroups = sinon.stub().returns(undefined);
6409
6608
  });
6410
6609
  it('should throw error if not audio type', () => {
6411
- expect(() => meeting.setSendNamedMediaGroup(MediaType.VideoMain, 20)).to.throw(`cannot set send named media group which media type is ${MediaType.VideoMain}`)
6412
-
6610
+ expect(() => meeting.setSendNamedMediaGroup(MediaType.VideoMain, 20)).to.throw(
6611
+ `cannot set send named media group which media type is ${MediaType.VideoMain}`
6612
+ );
6413
6613
  });
6414
6614
  it('fails if there is no media connection', () => {
6415
-
6416
6615
  meeting.mediaProperties.webrtcMediaConnection = undefined;
6417
6616
  meeting.setSendNamedMediaGroup('AUDIO-MAIN', 20);
6418
6617
  assert.notCalled(meeting.sendSlotManager.setNamedMediaGroups);
@@ -6421,8 +6620,10 @@ describe('plugin-meetings', () => {
6421
6620
  it('success if there is media connection', () => {
6422
6621
  meeting.isMultistream = true;
6423
6622
  meeting.mediaProperties.webrtcMediaConnection = true;
6424
- meeting.setSendNamedMediaGroup("AUDIO-MAIN", 20);
6425
- assert.calledOnceWithExactly(meeting.sendSlotManager.setNamedMediaGroups, "AUDIO-MAIN", [{type: 1, value: 20}]);
6623
+ meeting.setSendNamedMediaGroup('AUDIO-MAIN', 20);
6624
+ assert.calledOnceWithExactly(meeting.sendSlotManager.setNamedMediaGroups, 'AUDIO-MAIN', [
6625
+ {type: 1, value: 20},
6626
+ ]);
6426
6627
  });
6427
6628
  });
6428
6629