@webex/plugin-meetings 3.0.0-beta.85 → 3.0.0-beta.87

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist/breakouts/breakout.js +1 -1
  2. package/dist/breakouts/index.js +1 -1
  3. package/dist/index.js +63 -1
  4. package/dist/index.js.map +1 -1
  5. package/dist/media/index.js +7 -4
  6. package/dist/media/index.js.map +1 -1
  7. package/dist/media/properties.js.map +1 -1
  8. package/dist/meeting/index.js +139 -88
  9. package/dist/meeting/index.js.map +1 -1
  10. package/dist/meeting/muteState.js +169 -26
  11. package/dist/meeting/muteState.js.map +1 -1
  12. package/dist/meeting/util.js.map +1 -1
  13. package/dist/meetings/index.js +20 -2
  14. package/dist/meetings/index.js.map +1 -1
  15. package/dist/types/controls-options-manager/util.d.ts +0 -102
  16. package/dist/types/index.d.ts +6 -5
  17. package/dist/types/media/properties.d.ts +1 -1
  18. package/dist/types/meeting/index.d.ts +8 -5
  19. package/dist/types/meeting/muteState.d.ts +58 -5
  20. package/dist/types/meetings/index.d.ts +1 -0
  21. package/dist/types/multistream/remoteMedia.d.ts +1 -29
  22. package/dist/types/multistream/remoteMediaGroup.d.ts +0 -9
  23. package/package.json +19 -18
  24. package/src/{index.js → index.ts} +15 -0
  25. package/src/media/index.ts +17 -18
  26. package/src/media/properties.ts +3 -7
  27. package/src/meeting/index.ts +108 -51
  28. package/src/meeting/muteState.ts +158 -15
  29. package/src/meeting/util.ts +1 -1
  30. package/src/meetings/index.ts +14 -0
  31. package/test/integration/spec/converged-space-meetings.js +4 -3
  32. package/test/integration/spec/journey.js +4 -3
  33. package/test/integration/spec/space-meeting.js +4 -3
  34. package/test/unit/spec/media/index.ts +30 -2
  35. package/test/unit/spec/meeting/index.js +24 -28
  36. package/test/unit/spec/meeting/muteState.js +226 -13
  37. package/test/utils/integrationTestUtils.js +64 -0
  38. package/test/utils/testUtils.js +0 -57
@@ -1,15 +1,17 @@
1
+ import {ServerMuteReason} from '@webex/media-helpers';
1
2
  import LoggerProxy from '../common/logs/logger-proxy';
2
3
  import ParameterError from '../common/errors/parameter';
3
4
  import PermissionError from '../common/errors/permission';
4
5
  import MeetingUtil from './util';
5
6
  import {AUDIO, VIDEO} from '../constants';
6
-
7
7
  /* Certain aspects of server interaction for video muting are not implemented as we currently don't support remote muting of video.
8
8
  If we ever need to support it, search for REMOTE_MUTE_VIDEO_MISSING_IMPLEMENTATION string to find the places that need updating
9
9
  */
10
10
 
11
11
  // eslint-disable-next-line import/prefer-default-export
12
- export const createMuteState = (type, meeting, mediaDirection) => {
12
+ export const createMuteState = (type, meeting, mediaDirection, sdkOwnsLocalTrack: boolean) => {
13
+ // todo: remove mediaDirection argument (SPARK-399695)
14
+ // todo: remove the meeting argument (SPARK-399695)
13
15
  if (type === AUDIO && !mediaDirection.sendAudio) {
14
16
  return null;
15
17
  }
@@ -21,7 +23,11 @@ export const createMuteState = (type, meeting, mediaDirection) => {
21
23
  `Meeting:muteState#createMuteState --> ${type}: creating MuteState for meeting id ${meeting?.id}`
22
24
  );
23
25
 
24
- return new MuteState(type, meeting);
26
+ const muteState = new MuteState(type, meeting, sdkOwnsLocalTrack);
27
+
28
+ muteState.init(meeting);
29
+
30
+ return muteState;
25
31
  };
26
32
 
27
33
  /** The purpose of this class is to manage the local and remote mute state and make sure that the server state always matches
@@ -34,18 +40,23 @@ class MuteState {
34
40
  pendingPromiseResolve: any;
35
41
  state: any;
36
42
  type: any;
43
+ sdkOwnsLocalTrack: boolean; // todo: remove this when doing SPARK-399695
44
+ ignoreMuteStateChange: boolean;
37
45
 
38
46
  /**
39
47
  * Constructor
40
48
  *
41
49
  * @param {String} type - audio or video
42
50
  * @param {Object} meeting - the meeting object (used for reading current remote mute status)
51
+ * @param {boolean} sdkOwnsLocalTrack - if false, then client app owns the local track (for now that's the case only for multistream meetings)
43
52
  */
44
- constructor(type: string, meeting: any) {
53
+ constructor(type: string, meeting: any, sdkOwnsLocalTrack: boolean) {
45
54
  if (type !== AUDIO && type !== VIDEO) {
46
55
  throw new ParameterError('Mute state is designed for handling audio or video only');
47
56
  }
48
57
  this.type = type;
58
+ this.sdkOwnsLocalTrack = sdkOwnsLocalTrack;
59
+ this.ignoreMuteStateChange = false;
49
60
  this.state = {
50
61
  client: {
51
62
  localMute: false,
@@ -63,6 +74,69 @@ class MuteState {
63
74
  this.pendingPromiseReject = null;
64
75
  }
65
76
 
77
+ /**
78
+ * Starts the mute state machine. Needs to be called after a new MuteState instance is created.
79
+ *
80
+ * @param {Object} meeting - the meeting object
81
+ * @returns {void}
82
+ */
83
+ public init(meeting: any) {
84
+ if (!this.sdkOwnsLocalTrack) {
85
+ this.applyUnmuteAllowedToTrack(meeting);
86
+
87
+ // if we are remotely muted, we need to apply that to the local track now (mute on-entry)
88
+ if (this.state.server.remoteMute) {
89
+ this.muteLocalTrack(meeting, this.state.server.remoteMute, 'remotelyMuted');
90
+ }
91
+
92
+ const initialMute =
93
+ this.type === AUDIO
94
+ ? meeting.mediaProperties.audioTrack?.muted
95
+ : meeting.mediaProperties.videoTrack?.muted;
96
+
97
+ LoggerProxy.logger.info(
98
+ `Meeting:muteState#start --> ${this.type}: local track initial mute state: ${initialMute}`
99
+ );
100
+
101
+ if (initialMute !== undefined) {
102
+ this.state.client.localMute = initialMute;
103
+
104
+ this.applyClientStateToServer(meeting);
105
+ }
106
+ }
107
+ }
108
+
109
+ /**
110
+ * This method needs to be called whenever the local audio/video track has changed.
111
+ * It reapplies the remote mute state onto the new track and also reads the current
112
+ * local mute state from the track and updates the internal state machine and sends
113
+ * any required requests to the server.
114
+ *
115
+ * @param {Object} meeting - the meeting object
116
+ * @returns {void}
117
+ */
118
+ public handleLocalTrackChange(meeting: any) {
119
+ return this.init(meeting);
120
+ }
121
+
122
+ /**
123
+ * Mutes/unmutes local track
124
+ *
125
+ * @param {Object} meeting - the meeting object
126
+ * @param {Boolean} mute - true to mute the track, false to unmute it
127
+ * @param {ServerMuteReason} reason - reason for muting/unmuting
128
+ * @returns {void}
129
+ */
130
+ private muteLocalTrack(meeting: any, mute: boolean, reason: ServerMuteReason) {
131
+ this.ignoreMuteStateChange = true;
132
+ if (this.type === AUDIO) {
133
+ meeting.mediaProperties.audioTrack?.setServerMuted(mute, reason);
134
+ } else {
135
+ meeting.mediaProperties.videoTrack?.setServerMuted(mute, reason);
136
+ }
137
+ this.ignoreMuteStateChange = false;
138
+ }
139
+
66
140
  /**
67
141
  * Handles mute/unmute request from the client/user. Returns a promise that's resolved once the server update is completed or
68
142
  * at the point that this request becomes superseded by another client request.
@@ -77,7 +151,8 @@ class MuteState {
77
151
  * @param {Boolean} [mute] true for muting, false for unmuting request
78
152
  * @returns {Promise}
79
153
  */
80
- public handleClientRequest(meeting?: object, mute?: boolean) {
154
+ public handleClientRequest(meeting: object, mute?: boolean) {
155
+ // todo: this whole method will be removed in SPARK-399695
81
156
  LoggerProxy.logger.info(
82
157
  `Meeting:muteState#handleClientRequest --> ${this.type}: user requesting new mute state: ${mute}`
83
158
  );
@@ -89,8 +164,9 @@ class MuteState {
89
164
  }
90
165
 
91
166
  // we don't check if we're already in the same state, because even if we were, we would still have to apply the mute state locally,
92
- // because the client may have changed the audio/vidoe tracks
167
+ // because the client may have changed the audio/video tracks
93
168
  this.state.client.localMute = mute;
169
+
94
170
  this.applyClientStateLocally(meeting);
95
171
 
96
172
  return new Promise((resolve, reject) => {
@@ -104,19 +180,51 @@ class MuteState {
104
180
  });
105
181
  }
106
182
 
183
+ /**
184
+ * This method should be called when the local track mute state is changed
185
+ * @public
186
+ * @memberof MuteState
187
+ * @param {Object} [meeting] the meeting object
188
+ * @param {Boolean} [mute] true for muting, false for unmuting request
189
+ * @returns {void}
190
+ */
191
+ public handleLocalTrackMuteStateChange(meeting?: object, mute?: boolean) {
192
+ if (this.ignoreMuteStateChange) {
193
+ return;
194
+ }
195
+ LoggerProxy.logger.info(
196
+ `Meeting:muteState#handleLocalTrackMuteStateChange --> ${this.type}: local track new mute state: ${mute}`
197
+ );
198
+
199
+ if (this.pendingPromiseReject) {
200
+ LoggerProxy.logger.error(
201
+ `Meeting:muteState#handleLocalTrackMuteStateChange --> ${this.type}: Local track mute state change handler called while a client request is handled - this should never happen!, mute state: ${mute}`
202
+ );
203
+ }
204
+
205
+ this.state.client.localMute = mute;
206
+
207
+ this.applyClientStateToServer(meeting);
208
+ }
209
+
107
210
  /**
108
211
  * Applies the current mute state to the local track (by enabling or disabling it accordingly)
109
212
  *
110
213
  * @public
111
214
  * @param {Object} [meeting] the meeting object
215
+ * @param {ServerMuteReason} reason - reason why we're applying our client state to the local track
112
216
  * @memberof MuteState
113
217
  * @returns {void}
114
218
  */
115
- public applyClientStateLocally(meeting?: any) {
116
- if (this.type === AUDIO) {
117
- meeting.mediaProperties.audioTrack?.setMuted(this.state.client.localMute);
219
+ public applyClientStateLocally(meeting?: any, reason?: ServerMuteReason) {
220
+ if (this.sdkOwnsLocalTrack) {
221
+ if (this.type === AUDIO) {
222
+ meeting.mediaProperties.audioTrack?.setMuted(this.state.client.localMute);
223
+ } else {
224
+ meeting.mediaProperties.videoTrack?.setMuted(this.state.client.localMute);
225
+ }
118
226
  } else {
119
- meeting.mediaProperties.videoTrack?.setMuted(this.state.client.localMute);
227
+ this.muteLocalTrack(meeting, this.state.client.localMute, reason);
120
228
  }
121
229
  }
122
230
 
@@ -190,6 +298,8 @@ class MuteState {
190
298
  }
191
299
  this.pendingPromiseResolve = null;
192
300
  this.pendingPromiseReject = null;
301
+
302
+ this.applyServerMuteToLocalTrack(meeting, 'clientRequestFailed');
193
303
  });
194
304
  }
195
305
 
@@ -265,24 +375,56 @@ class MuteState {
265
375
  });
266
376
  }
267
377
 
378
+ /** Sets the mute state of the local track according to what server thinks is our state
379
+ * @param {Object} meeting - the meeting object
380
+ * @param {ServerMuteReason} serverMuteReason - reason why we're applying server mute to the local track
381
+ * @returns {void}
382
+ */
383
+ private applyServerMuteToLocalTrack(meeting: any, serverMuteReason: ServerMuteReason) {
384
+ if (!this.sdkOwnsLocalTrack) {
385
+ const muted = this.state.server.localMute || this.state.server.remoteMute;
386
+
387
+ // update the local track mute state, but not this.state.client.localMute
388
+ this.muteLocalTrack(meeting, muted, serverMuteReason);
389
+ }
390
+ }
391
+
392
+ /** Applies the current value for unmute allowed to the underlying track
393
+ *
394
+ * @param {Meeting} meeting
395
+ * @returns {void}
396
+ */
397
+ private applyUnmuteAllowedToTrack(meeting: any) {
398
+ if (!this.sdkOwnsLocalTrack) {
399
+ if (this.type === AUDIO) {
400
+ meeting.mediaProperties.audioTrack?.setUnmuteAllowed(this.state.server.unmuteAllowed);
401
+ } else {
402
+ meeting.mediaProperties.videoTrack?.setUnmuteAllowed(this.state.server.unmuteAllowed);
403
+ }
404
+ }
405
+ }
406
+
268
407
  /**
269
408
  * This method should be called whenever the server remote mute state is changed
270
409
  *
271
410
  * @public
272
411
  * @memberof MuteState
412
+ * @param {Meeting} meeting
273
413
  * @param {Boolean} [muted] true if user is remotely muted, false otherwise
274
414
  * @param {Boolean} [unmuteAllowed] indicates if user is allowed to unmute self (false when "hard mute" feature is used)
275
415
  * @returns {undefined}
276
416
  */
277
- public handleServerRemoteMuteUpdate(muted?: boolean, unmuteAllowed?: boolean) {
417
+ public handleServerRemoteMuteUpdate(meeting: any, muted?: boolean, unmuteAllowed?: boolean) {
278
418
  LoggerProxy.logger.info(
279
419
  `Meeting:muteState#handleServerRemoteMuteUpdate --> ${this.type}: updating server remoteMute to (${muted})`
280
420
  );
281
- if (muted !== undefined) {
282
- this.state.server.remoteMute = muted;
283
- }
284
421
  if (unmuteAllowed !== undefined) {
285
422
  this.state.server.unmuteAllowed = unmuteAllowed;
423
+ this.applyUnmuteAllowedToTrack(meeting);
424
+ }
425
+ if (muted !== undefined) {
426
+ this.state.server.remoteMute = muted;
427
+ this.applyServerMuteToLocalTrack(meeting, 'remotelyMuted');
286
428
  }
287
429
  }
288
430
 
@@ -299,6 +441,7 @@ class MuteState {
299
441
  `Meeting:muteState#handleServerLocalUnmuteRequired --> ${this.type}: localAudioUnmuteRequired received -> doing local unmute`
300
442
  );
301
443
 
444
+ // todo: I'm seeing "you can now unmute yourself " popup when this happens - but same thing happens on web.w.c so we can ignore for now
302
445
  this.state.server.remoteMute = false;
303
446
  this.state.client.localMute = false;
304
447
 
@@ -310,7 +453,7 @@ class MuteState {
310
453
  this.pendingPromiseReject = null;
311
454
  }
312
455
 
313
- this.applyClientStateLocally(meeting);
456
+ this.applyClientStateLocally(meeting, 'localUnmuteRequired');
314
457
  this.applyClientStateToServer(meeting);
315
458
  }
316
459
 
@@ -1,5 +1,5 @@
1
1
  import {isEmpty} from 'lodash';
2
- import {LocalCameraTrack, LocalMicrophoneTrack} from '@webex/internal-media-core';
2
+ import {LocalCameraTrack, LocalMicrophoneTrack} from '@webex/media-helpers';
3
3
 
4
4
  import {MeetingNotActiveError, UserNotJoinedError} from '../common/errors/webex-errors';
5
5
  import Metrics from '../metrics';
@@ -6,6 +6,8 @@ import '@webex/internal-plugin-conversation';
6
6
  import {WebexPlugin} from '@webex/webex-core';
7
7
  import {setLogger} from '@webex/internal-media-core';
8
8
 
9
+ import * as mediaHelpersModule from '@webex/media-helpers';
10
+
9
11
  import 'webrtc-adapter';
10
12
 
11
13
  import Metrics from '../metrics';
@@ -149,6 +151,7 @@ export default class Meetings extends WebexPlugin {
149
151
  request: any;
150
152
  geoHintInfo: any;
151
153
  meetingInfo: any;
154
+ mediaHelpers: any;
152
155
 
153
156
  namespace = MEETINGS;
154
157
 
@@ -161,6 +164,17 @@ export default class Meetings extends WebexPlugin {
161
164
  constructor(...args) {
162
165
  super(...args);
163
166
 
167
+ /**
168
+ * The webrtc-core media helpers. This is a temporary solution required for the SDK sample app
169
+ * to be able to call media helper functions.
170
+ *
171
+ * @instance
172
+ * @type {Object}
173
+ * @private
174
+ * @memberof Meetings
175
+ */
176
+ this.mediaHelpers = mediaHelpersModule;
177
+
164
178
  /**
165
179
  * The Meetings request to interact with server
166
180
  * @instance
@@ -6,6 +6,7 @@ import BrowserDetection from '@webex/plugin-meetings/dist/common/browser-detecti
6
6
 
7
7
  import {MEDIA_SERVERS} from '../../utils/constants';
8
8
  import testUtils from '../../utils/testUtils';
9
+ import integrationTestUtils from '../../utils/integrationTestUtils';
9
10
  import webexTestUsers from '../../utils/webex-test-users';
10
11
 
11
12
  config();
@@ -142,9 +143,9 @@ skipInNode(describe)('plugin-meetings', () => {
142
143
  {scope: chris.meeting, event: 'media:negotiated'},
143
144
  ]);
144
145
 
145
- const addMediaAlice = testUtils.addMedia(alice, {multistream: true, expectedMediaReadyTypes: ['local']});
146
- const addMediaBob = testUtils.addMedia(bob, {multistream: true, expectedMediaReadyTypes: ['local']});
147
- const addMediaChris = testUtils.addMedia(chris, {multistream: true, expectedMediaReadyTypes: ['local']});
146
+ const addMediaAlice = integrationTestUtils.addMedia(alice, {multistream: true});
147
+ const addMediaBob = integrationTestUtils.addMedia(bob, {multistream: true});
148
+ const addMediaChris = integrationTestUtils.addMedia(chris, {multistream: true});
148
149
 
149
150
  await addMediaAlice;
150
151
  await addMediaBob;
@@ -9,6 +9,7 @@ import BrowserDetection from '@webex/plugin-meetings/dist/common/browser-detecti
9
9
 
10
10
  import DEFAULT_RESOLUTIONS from '../../../src/config';
11
11
  import testUtils from '../../utils/testUtils';
12
+ import integrationTestUtils from '../../utils/integrationTestUtils';
12
13
 
13
14
  require('dotenv').config();
14
15
 
@@ -301,7 +302,7 @@ skipInNode(describe)('plugin-meetings', () => {
301
302
  })
302
303
  .then(() =>
303
304
  Promise.all([
304
- testUtils.addMedia(alice),
305
+ integrationTestUtils.addMedia(alice),
305
306
  testUtils.waitForEvents([
306
307
  {scope: alice.meeting, event: 'meeting:media:local:start', user: alice},
307
308
  ]),
@@ -330,7 +331,7 @@ skipInNode(describe)('plugin-meetings', () => {
330
331
 
331
332
  it('bob adds media to the meeting', () =>
332
333
  Promise.all([
333
- testUtils.addMedia(bob),
334
+ integrationTestUtils.addMedia(bob),
334
335
  testUtils
335
336
  .waitForEvents([
336
337
  {scope: bob.meeting, event: 'meeting:media:local:start', user: bob},
@@ -886,7 +887,7 @@ skipInNode(describe)('plugin-meetings', () => {
886
887
  );
887
888
  })
888
889
  .then(() => testUtils.waitForStateChange(chris.meeting, 'JOINED'))
889
- .then(() => testUtils.addMedia(chris))
890
+ .then(() => integrationTestUtils.addMedia(chris))
890
891
  .then(() => assert(enumerateSpy.called));
891
892
  })
892
893
  .then(() =>
@@ -6,6 +6,7 @@ import MeetingInfoUtil from '@webex/plugin-meetings/dist/meeting-info/utilv2';
6
6
 
7
7
  import CMR from '../../utils/cmr';
8
8
  import testUtils from '../../utils/testUtils';
9
+ import integrationTestUtils from '../../utils/integrationTestUtils';
9
10
 
10
11
  require('dotenv').config();
11
12
 
@@ -136,7 +137,7 @@ skipInNode(describe)('plugin-meetings', () => {
136
137
  .then(() => testUtils.waitForStateChange(chris.meeting, 'JOINED')));
137
138
 
138
139
  it('Bob and Alice addsMedia', () =>
139
- testUtils.addMedia(bob).then(() => testUtils.addMedia(alice)));
140
+ integrationTestUtils.addMedia(bob).then(() => integrationTestUtils.addMedia(alice)));
140
141
 
141
142
  it('Bob has flowing streams on reconnect', () => {
142
143
  const retrieveStats = () => {
@@ -187,7 +188,7 @@ skipInNode(describe)('plugin-meetings', () => {
187
188
  ])
188
189
  )
189
190
  .then(() => testUtils.waitForStateChange(guest.meeting, 'JOINED'))
190
- .then(() => testUtils.addMedia(guest))
191
+ .then(() => integrationTestUtils.addMedia(guest))
191
192
  .catch((e) => {
192
193
  console.error('Error chris joining the meeting ', e);
193
194
  throw e;
@@ -414,7 +415,7 @@ skipInNode(describe)('plugin-meetings', () => {
414
415
  }),
415
416
  ])
416
417
  .then(() => testUtils.waitForStateChange(guest.meeting, 'JOINED'))
417
- .then(() => testUtils.addMedia(guest));
418
+ .then(() => integrationTestUtils.addMedia(guest));
418
419
  })
419
420
  .catch((e) => {
420
421
  console.error('Error guest joining the meeting ', e);
@@ -170,7 +170,6 @@ describe('createMediaConnection', () => {
170
170
  iceServers: [],
171
171
  enableMainAudio,
172
172
  enableMainVideo,
173
- bundlePolicy: undefined,
174
173
  },
175
174
  'some debug id'
176
175
  );
@@ -201,10 +200,39 @@ describe('createMediaConnection', () => {
201
200
  iceServers: [],
202
201
  enableMainAudio: true,
203
202
  enableMainVideo: true,
204
- bundlePolicy: undefined,
205
203
  },
206
204
  'debug string'
207
205
  );
206
+
207
+ it('does not pass bundlePolicy to MultistreamRoapMediaConnection if bundlePolicy is undefined', () => {
208
+ const multistreamRoapMediaConnectionConstructorStub = sinon
209
+ .stub(internalMediaModule, 'MultistreamRoapMediaConnection')
210
+ .returns(fakeRoapMediaConnection);
211
+
212
+ Media.createMediaConnection(true, 'debug string', {
213
+ mediaProperties: {
214
+ mediaDirection: {
215
+ sendAudio: true,
216
+ sendVideo: true,
217
+ sendShare: false,
218
+ receiveAudio: true,
219
+ receiveVideo: true,
220
+ receiveShare: true,
221
+ },
222
+ },
223
+ bundlePolicy: undefined
224
+ });
225
+ assert.calledOnce(multistreamRoapMediaConnectionConstructorStub);
226
+ assert.calledWith(
227
+ multistreamRoapMediaConnectionConstructorStub,
228
+ {
229
+ iceServers: [],
230
+ enableMainAudio: true,
231
+ enableMainVideo: true,
232
+ },
233
+ 'debug string'
234
+ );
235
+ });
208
236
  });
209
237
 
210
238
  it('passes empty ICE servers array to RoapMediaConnection if turnServerInfo is undefined (multistream disabled)', () => {
@@ -3753,15 +3753,19 @@ describe('plugin-meetings', () => {
3753
3753
  audioTrack = {
3754
3754
  id: 'audio track',
3755
3755
  getSettings: sinon.stub().returns({}),
3756
+ on: sinon.stub(),
3757
+ off: sinon.stub(),
3756
3758
  };
3757
3759
  videoTrack = {
3758
3760
  id: 'video track',
3759
3761
  getSettings: sinon.stub().returns({}),
3762
+ on: sinon.stub(),
3763
+ off: sinon.stub(),
3760
3764
  };
3761
3765
  videoShareTrack = {
3762
3766
  id: 'share track',
3763
3767
  on: sinon.stub(),
3764
- removeEventListener: sinon.stub(),
3768
+ off: sinon.stub(),
3765
3769
  getSettings: sinon.stub().returns({}),
3766
3770
  };
3767
3771
  meeting.requestScreenShareFloor = sinon.stub().resolves({});
@@ -3771,6 +3775,7 @@ describe('plugin-meetings', () => {
3771
3775
  sendVideo: false,
3772
3776
  sendShare: false,
3773
3777
  };
3778
+ meeting.isMultistream = true;
3774
3779
  meeting.mediaProperties.webrtcMediaConnection = {
3775
3780
  publishTrack: sinon.stub().resolves({}),
3776
3781
  unpublishTrack: sinon.stub().resolves({}),
@@ -3814,10 +3819,7 @@ describe('plugin-meetings', () => {
3814
3819
  await assert.isRejected(meeting.publishTracks({audio: {id: 'some audio track'}}));
3815
3820
  });
3816
3821
 
3817
- const checkAudioPublished = () => {
3818
- assert.calledWith(MediaUtil.createMediaStream, [audioTrack]);
3819
- assert.calledOnce(LocalMicrophoneTrackConstructorStub);
3820
-
3822
+ const checkAudioPublished = (track) => {
3821
3823
  assert.calledWith(
3822
3824
  createMuteStateStub,
3823
3825
  'audio',
@@ -3826,16 +3828,13 @@ describe('plugin-meetings', () => {
3826
3828
  );
3827
3829
  assert.calledWith(
3828
3830
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3829
- fakeLocalMicrophoneTrack
3831
+ track
3830
3832
  );
3831
- assert.equal(meeting.mediaProperties.audioTrack, fakeLocalMicrophoneTrack);
3833
+ assert.equal(meeting.mediaProperties.audioTrack, track);
3832
3834
  assert.equal(meeting.mediaProperties.mediaDirection.sendAudio, true);
3833
3835
  };
3834
3836
 
3835
- const checkVideoPublished = () => {
3836
- assert.calledWith(MediaUtil.createMediaStream, [videoTrack]);
3837
- assert.calledOnce(LocalCameraTrackConstructorStub);
3838
-
3837
+ const checkVideoPublished = (track) => {
3839
3838
  assert.calledWith(
3840
3839
  createMuteStateStub,
3841
3840
  'video',
@@ -3844,23 +3843,20 @@ describe('plugin-meetings', () => {
3844
3843
  );
3845
3844
  assert.calledWith(
3846
3845
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3847
- fakeLocalCameraTrack
3846
+ track
3848
3847
  );
3849
- assert.equal(meeting.mediaProperties.videoTrack, fakeLocalCameraTrack);
3848
+ assert.equal(meeting.mediaProperties.videoTrack, track);
3850
3849
  assert.equal(meeting.mediaProperties.mediaDirection.sendVideo, true);
3851
3850
  };
3852
3851
 
3853
- const checkScreenShareVideoPublished = () => {
3852
+ const checkScreenShareVideoPublished = (track) => {
3854
3853
  assert.calledOnce(meeting.requestScreenShareFloor);
3855
3854
 
3856
- assert.calledWith(MediaUtil.createMediaStream, [videoShareTrack]);
3857
- assert.calledOnce(LocalDisplayTrackConstructorStub);
3858
-
3859
3855
  assert.calledWith(
3860
3856
  meeting.mediaProperties.webrtcMediaConnection.publishTrack,
3861
- fakeLocalDisplayTrack
3857
+ track
3862
3858
  );
3863
- assert.equal(meeting.mediaProperties.shareTrack, fakeLocalDisplayTrack);
3859
+ assert.equal(meeting.mediaProperties.shareTrack, track);
3864
3860
  assert.equal(meeting.mediaProperties.mediaDirection.sendShare, true);
3865
3861
  };
3866
3862
 
@@ -3868,7 +3864,7 @@ describe('plugin-meetings', () => {
3868
3864
  await meeting.publishTracks({screenShare: {video: videoShareTrack}});
3869
3865
 
3870
3866
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3871
- checkScreenShareVideoPublished();
3867
+ checkScreenShareVideoPublished(videoShareTrack);
3872
3868
  });
3873
3869
 
3874
3870
  it('creates MuteState instance and publishes the track for main audio', async () => {
@@ -3876,7 +3872,7 @@ describe('plugin-meetings', () => {
3876
3872
 
3877
3873
  assert.calledOnce(createMuteStateStub);
3878
3874
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3879
- checkAudioPublished();
3875
+ checkAudioPublished(audioTrack);
3880
3876
  });
3881
3877
 
3882
3878
  it('creates MuteState instance and publishes the track for main video', async () => {
@@ -3884,7 +3880,7 @@ describe('plugin-meetings', () => {
3884
3880
 
3885
3881
  assert.calledOnce(createMuteStateStub);
3886
3882
  assert.calledOnce(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3887
- checkVideoPublished();
3883
+ checkVideoPublished(videoTrack);
3888
3884
  });
3889
3885
 
3890
3886
  it('publishes audio, video and screen share together', async () => {
@@ -3898,9 +3894,9 @@ describe('plugin-meetings', () => {
3898
3894
 
3899
3895
  assert.calledTwice(createMuteStateStub);
3900
3896
  assert.calledThrice(meeting.mediaProperties.webrtcMediaConnection.publishTrack);
3901
- checkAudioPublished();
3902
- checkVideoPublished();
3903
- checkScreenShareVideoPublished();
3897
+ checkAudioPublished(audioTrack);
3898
+ checkVideoPublished(videoTrack);
3899
+ checkScreenShareVideoPublished(videoShareTrack);
3904
3900
  });
3905
3901
  });
3906
3902
 
@@ -3916,7 +3912,7 @@ describe('plugin-meetings', () => {
3916
3912
  const checkAudioUnpublished = () => {
3917
3913
  assert.calledWith(
3918
3914
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3919
- fakeLocalMicrophoneTrack
3915
+ audioTrack
3920
3916
  );
3921
3917
 
3922
3918
  assert.equal(meeting.mediaProperties.audioTrack, null);
@@ -3926,7 +3922,7 @@ describe('plugin-meetings', () => {
3926
3922
  const checkVideoUnpublished = () => {
3927
3923
  assert.calledWith(
3928
3924
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3929
- fakeLocalCameraTrack
3925
+ videoTrack
3930
3926
  );
3931
3927
 
3932
3928
  assert.equal(meeting.mediaProperties.videoTrack, null);
@@ -3936,7 +3932,7 @@ describe('plugin-meetings', () => {
3936
3932
  const checkScreenShareVideoUnpublished = () => {
3937
3933
  assert.calledWith(
3938
3934
  meeting.mediaProperties.webrtcMediaConnection.unpublishTrack,
3939
- fakeLocalDisplayTrack
3935
+ videoShareTrack
3940
3936
  );
3941
3937
 
3942
3938
  assert.calledOnce(meeting.requestScreenShareFloor);