@webex/media-helpers 3.0.0-bnr.5 → 3.0.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -0
- package/babel.config.json +13 -0
- package/dist/constants.d.ts +13 -0
- package/dist/index.d.ts +4 -0
- package/dist/webrtc-core.d.ts +48 -0
- package/package.json +22 -5
- package/src/constants.ts +29 -0
- package/src/index.ts +22 -11
- package/src/webrtc-core.ts +40 -23
- package/test/unit/spec/webrtc-core.js +116 -86
- package/tsconfig.json +6 -0
- package/dist/index.js +0 -68
- package/dist/index.js.map +0 -1
- package/dist/webrtc-core.js +0 -185
- package/dist/webrtc-core.js.map +0 -1
package/README.md
CHANGED
@@ -21,6 +21,78 @@ npm install --save @webex/media-helpers
 
 ## Usage
 
+### Effects
+There are two effects included in this package:
+
+Virtual background (e.g., blur, image replacement, video replacement)
+Noise reduction (e.g., background noise removal)
+
+#### Virtual background
+The virtual background effect provides a virtual background for video calling. The virtual background may be an image, an mp4 video, or the user's background with blur applied.
+
+**Applying the effect**
+1. Create a new camera track instance by using the LocalCameraTrack() method.
+2. Create a VirtualBackgroundEffect instance by passing appropriate constraints.
+3. Use the addEffect() method on cameraTrack to apply the effect to it.
+4. Enable the effect after adding it to cameraTrack using the enable() method available on the effect. The effect will be enabled on cameraTrack.
+
+```javascript
+import {LocalCameraTrack, VirtualBackgroundEffect} from '@webex/media-helpers';
+
+// Create a new video stream by getting a user's video media.
+const stream = await navigator.mediaDevices.getUserMedia({ video: { width, height } });
+
+const videoTrackFromLocalStream = stream.getVideoTracks()[0];
+
+const cameraTrack = new LocalCameraTrack(new MediaStream([videoTrackFromLocalStream]));
+
+// Create the effect.
+const effect = new VirtualBackgroundEffect({
+  authToken: '<encoded-string>',
+  mode: `BLUR`,
+  blurStrength: `STRONG`,
+  quality: `LOW`,
+});
+
+// Add the effect on the input camera track.
+await cameraTrack.addEffect("background-blur", effect);
+
+// Enable the effect once it is added to the track.
+await effect.enable()
+```
+
+#### Noise reduction
+The noise reduction effect removes background noise from an audio stream to provide clear audio for calling.
+
+**Applying the effect**
+1. Create a new microphone track instance by using the LocalMicrophoneTrack() method.
+2. Create a NoiseReductionEffect instance by passing appropriate constraints.
+3. Use the addEffect() method on microphoneTrack to apply the effect to it.
+4. Enable the effect after adding it to microphoneTrack using the enable() method available on the effect. The effect will be enabled on microphoneTrack.
+
+```javascript
+import {LocalMicrophoneTrack, NoiseReductionEffect} from '@webex/media-helpers';
+
+// Create a new audio stream by getting a user's audio media.
+const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+
+const audioTrackFromLocalStream = stream.getAudioTracks()[0];
+
+const microphoneTrack = new LocalMicrophoneTrack(new MediaStream([audioTrackFromLocalStream]));
+
+// Create the effect.
+const effect = new NoiseReductionEffect({
+  authToken: '<encoded-string>',
+  mode: 'WORKLET', // or 'LEGACY'
+});
+
+// Add the effect on microphone track.
+await microphoneTrack.addEffect("background-noise-removal", effect);
+
+// Enable the effect once it is added to the track.
+await effect.enable()
+```
+
 ## Maintainers
 
 This package is maintained by [Cisco Webex for Developers](https://developer.webex.com/).
package/dist/constants.d.ts
ADDED

@@ -0,0 +1,13 @@
+import { VideoDeviceConstraints } from '@webex/internal-media-core';
+export declare enum FacingMode {
+    user = "user",
+    environment = "environment"
+}
+export declare enum DisplaySurface {
+    browser = "browser",
+    monitor = "monitor",
+    window = "window"
+}
+export declare const PresetCameraConstraints: {
+    [key: string]: VideoDeviceConstraints;
+};
package/dist/index.d.ts
ADDED
@@ -0,0 +1,4 @@
+export { getDevices, LocalStream, LocalDisplayStream, LocalSystemAudioStream, LocalStreamEventNames, StreamEventNames, type ServerMuteReason, LocalMicrophoneStreamEventNames, LocalCameraStreamEventNames, LocalMicrophoneStream, LocalCameraStream, createMicrophoneStream, createCameraStream, createDisplayStream, createDisplayStreamWithAudio, } from './webrtc-core';
+export { NoiseReductionEffect, VirtualBackgroundEffect } from '@webex/web-media-effects';
+export type { NoiseReductionEffectOptions, VirtualBackgroundEffectOptions, } from '@webex/web-media-effects';
+export { FacingMode, DisplaySurface, PresetCameraConstraints } from './constants';
package/dist/webrtc-core.d.ts
ADDED

@@ -0,0 +1,48 @@
+import { AudioDeviceConstraints, LocalDisplayStream, LocalSystemAudioStream, LocalMicrophoneStream as WcmeLocalMicrophoneStream, LocalCameraStream as WcmeLocalCameraStream, VideoDeviceConstraints } from '@webex/internal-media-core';
+import { TypedEvent } from '@webex/ts-events';
+export { getDevices, LocalStream, LocalDisplayStream, LocalSystemAudioStream, LocalStreamEventNames, StreamEventNames, RemoteStream, } from '@webex/internal-media-core';
+export type ServerMuteReason = 'remotelyMuted' | 'clientRequestFailed' | 'localUnmuteRequired';
+export declare enum LocalMicrophoneStreamEventNames {
+    ServerMuted = "muted:byServer"
+}
+export declare enum LocalCameraStreamEventNames {
+    ServerMuted = "muted:byServer"
+}
+export declare class LocalMicrophoneStream extends WcmeLocalMicrophoneStream {
+    private unmuteAllowed;
+    [LocalMicrophoneStreamEventNames.ServerMuted]: TypedEvent<(muted: boolean, reason: ServerMuteReason) => void>;
+    /**
+     * @internal
+     */
+    setUnmuteAllowed(allowed: any): void;
+    /**
+     * @returns true if user is allowed to unmute the Stream, false otherwise
+     */
+    isUnmuteAllowed(): boolean;
+    setMuted(muted: boolean): void;
+    /**
+     * @internal
+     */
+    setServerMuted(muted: boolean, reason: ServerMuteReason): void;
+}
+export declare class LocalCameraStream extends WcmeLocalCameraStream {
+    private unmuteAllowed;
+    [LocalCameraStreamEventNames.ServerMuted]: TypedEvent<(muted: boolean, reason: ServerMuteReason) => void>;
+    /**
+     * @internal
+     */
+    setUnmuteAllowed(allowed: any): void;
+    /**
+     * @returns true if user is allowed to unmute the Stream, false otherwise
+     */
+    isUnmuteAllowed(): boolean;
+    setMuted(muted: boolean): void;
+    /**
+     * @internal
+     */
+    setServerMuted(muted: boolean, reason: ServerMuteReason): void;
+}
+export declare const createMicrophoneStream: (constraints?: AudioDeviceConstraints) => Promise<LocalMicrophoneStream>;
+export declare const createCameraStream: (constraints?: VideoDeviceConstraints) => Promise<LocalCameraStream>;
+export declare const createDisplayStream: () => Promise<LocalDisplayStream>;
+export declare const createDisplayStreamWithAudio: () => Promise<[LocalDisplayStream, LocalSystemAudioStream]>;
package/package.json
CHANGED
@@ -1,6 +1,5 @@
 {
   "name": "@webex/media-helpers",
-  "version": "3.0.0-bnr.5",
   "description": "",
   "license": "Cisco EULA (https://www.cisco.com/c/en/us/products/end-user-license-agreement.html)",
   "main": "dist/index.js",
@@ -13,8 +12,23 @@
   "engines": {
     "node": ">=16"
   },
+  "scripts": {
+    "build:src": "yarn run -T tsc --declaration true --declarationDir ./dist",
+    "test:broken": "yarn test:style && yarn test:unit && yarn test:integration && yarn test:browser",
+    "test:browser:broken": "webex-legacy-tools test --integration --unit --runner karma",
+    "test:integration:broken": "webex-legacy-tools test --integration --runner mocha",
+    "test:style": "eslint 'src/**/*.ts' --fix",
+    "test:unit:broken": "webex-legacy-tools test --unit --runner mocha",
+    "deploy:npm": "npm publish"
+  },
   "dependencies": {
-    "@webex/
+    "@webex/babel-config-legacy": "workspace:^",
+    "@webex/eslint-config-legacy": "workspace:^",
+    "@webex/internal-media-core": "^2.0.0",
+    "@webex/jest-config-legacy": "workspace:^",
+    "@webex/legacy-tools": "workspace:^",
+    "@webex/ts-events": "^1.1.0",
+    "@webex/web-media-effects": "^2.12.0"
   },
   "browserify": {
     "transform": [
@@ -23,8 +37,11 @@
     ]
   },
   "devDependencies": {
-    "@
-    "@webex/test-helper-
+    "@babel/preset-typescript": "7.22.11",
+    "@webex/test-helper-chai": "workspace:^",
+    "@webex/test-helper-mock-webex": "workspace:^",
+    "eslint": "^8.24.0",
     "sinon": "^9.2.4"
-  }
+  },
+  "version": "3.0.0-next.2"
 }
package/src/constants.ts
ADDED
@@ -0,0 +1,29 @@
+import {VideoDeviceConstraints} from '@webex/internal-media-core';
+
+export enum FacingMode {
+  user = 'user',
+  environment = 'environment',
+}
+
+// can be used later on when we add constraints in create display track
+export enum DisplaySurface {
+  browser = 'browser',
+  monitor = 'monitor',
+  window = 'window',
+}
+
+export const PresetCameraConstraints: {[key: string]: VideoDeviceConstraints} = {
+  '1080p': {frameRate: 30, width: 1920, height: 1080},
+
+  '720p': {frameRate: 30, width: 1280, height: 720},
+
+  '480p': {frameRate: 30, width: 640, height: 480},
+
+  '360p': {frameRate: 30, width: 640, height: 360},
+
+  '240p': {frameRate: 30, width: 320, height: 240},
+
+  '180p': {frameRate: 30, width: 320, height: 180},
+
+  '120p': {frameRate: 30, width: 160, height: 120},
+};
package/src/index.ts
CHANGED
@@ -1,14 +1,25 @@
 export {
-
-
-
-
+  getDevices,
+  LocalStream,
+  LocalDisplayStream,
+  LocalSystemAudioStream,
+  LocalStreamEventNames,
+  StreamEventNames,
   type ServerMuteReason,
-
-
-
-
-
-
+  LocalMicrophoneStreamEventNames,
+  LocalCameraStreamEventNames,
+  LocalMicrophoneStream,
+  LocalCameraStream,
+  createMicrophoneStream,
+  createCameraStream,
+  createDisplayStream,
+  createDisplayStreamWithAudio,
 } from './webrtc-core';
+
+export {NoiseReductionEffect, VirtualBackgroundEffect} from '@webex/web-media-effects';
+export type {
+  NoiseReductionEffectOptions,
+  VirtualBackgroundEffectOptions,
+} from '@webex/web-media-effects';
+
+export {FacingMode, DisplaySurface, PresetCameraConstraints} from './constants';
package/src/webrtc-core.ts
CHANGED
@@ -3,20 +3,26 @@
 /* eslint-disable require-jsdoc */
 import {
   AudioDeviceConstraints,
-
-
-
-
-
-
+  createCameraStream as wcmeCreateCameraStream,
+  createDisplayStream as wcmeCreateDisplayStream,
+  createDisplayStreamWithAudio as wcmeCreateDisplayStreamWithAudio,
+  createMicrophoneStream as wcmeCreateMicrophoneStream,
+  LocalDisplayStream,
+  LocalSystemAudioStream,
+  LocalMicrophoneStream as WcmeLocalMicrophoneStream,
+  LocalCameraStream as WcmeLocalCameraStream,
   VideoDeviceConstraints,
 } from '@webex/internal-media-core';
+import {TypedEvent} from '@webex/ts-events';
 
 export {
-
-
-
-
+  getDevices,
+  LocalStream,
+  LocalDisplayStream,
+  LocalSystemAudioStream,
+  LocalStreamEventNames,
+  StreamEventNames,
+  RemoteStream,
 } from '@webex/internal-media-core';
 
 export type ServerMuteReason =
@@ -25,18 +31,22 @@ export type ServerMuteReason =
   | 'localUnmuteRequired'; // server forced the client to be unmuted
 
 // these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed
-export enum
+export enum LocalMicrophoneStreamEventNames {
   ServerMuted = 'muted:byServer',
 }
 
 // these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed
-export enum
+export enum LocalCameraStreamEventNames {
   ServerMuted = 'muted:byServer',
 }
 
-export class
+export class LocalMicrophoneStream extends WcmeLocalMicrophoneStream {
   private unmuteAllowed = true;
 
+  [LocalMicrophoneStreamEventNames.ServerMuted] = new TypedEvent<
+    (muted: boolean, reason: ServerMuteReason) => void
+  >();
+
   /**
    * @internal
    */
@@ -45,7 +55,7 @@ export class LocalMicrophoneTrack extends WcmeLocalMicrophoneTrack {
   }
 
   /**
-   * @returns true if user is allowed to unmute the
+   * @returns true if user is allowed to unmute the Stream, false otherwise
    */
   isUnmuteAllowed() {
     return this.unmuteAllowed;
@@ -67,14 +77,18 @@ export class LocalMicrophoneTrack extends WcmeLocalMicrophoneTrack {
   setServerMuted(muted: boolean, reason: ServerMuteReason) {
     if (muted !== this.muted) {
       this.setMuted(muted);
-      this.emit(
+      this[LocalMicrophoneStreamEventNames.ServerMuted].emit(muted, reason);
     }
   }
 }
 
-export class
+export class LocalCameraStream extends WcmeLocalCameraStream {
   private unmuteAllowed = true;
 
+  [LocalCameraStreamEventNames.ServerMuted] = new TypedEvent<
+    (muted: boolean, reason: ServerMuteReason) => void
+  >();
+
   /**
    * @internal
    */
@@ -83,7 +97,7 @@ export class LocalCameraTrack extends WcmeLocalCameraTrack {
   }
 
   /**
-   * @returns true if user is allowed to unmute the
+   * @returns true if user is allowed to unmute the Stream, false otherwise
   */
   isUnmuteAllowed() {
     return this.unmuteAllowed;
@@ -105,15 +119,18 @@ export class LocalCameraTrack extends WcmeLocalCameraTrack {
   setServerMuted(muted: boolean, reason: ServerMuteReason) {
     if (muted !== this.muted) {
       this.setMuted(muted);
-      this.emit(
+      this[LocalCameraStreamEventNames.ServerMuted].emit(muted, reason);
     }
   }
 }
 
-export const
-
+export const createMicrophoneStream = (constraints?: AudioDeviceConstraints) =>
+  wcmeCreateMicrophoneStream(LocalMicrophoneStream, constraints);
+
+export const createCameraStream = (constraints?: VideoDeviceConstraints) =>
+  wcmeCreateCameraStream(LocalCameraStream, constraints);
 
-export const
-  wcmeCreateCameraTrack(LocalCameraTrack, constraints);
+export const createDisplayStream = () => wcmeCreateDisplayStream(LocalDisplayStream);
 
-export const
+export const createDisplayStreamWithAudio = () =>
+  wcmeCreateDisplayStreamWithAudio(LocalDisplayStream, LocalSystemAudioStream);
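
Not part of the diff: a rough sketch of how the server-mute additions above behave from the consumer side. The subscription mirrors the `stream.on(event.ServerMuted, handler)` pattern in the unit test below, the handler signature follows the TypedEvent declaration, and `demoServerMute` is an illustrative name; `setUnmuteAllowed()` is marked @internal and is normally driven by the SDK, so calling it here is only to show the guard inside setMuted().

```typescript
import {
  createMicrophoneStream,
  LocalMicrophoneStreamEventNames,
  type ServerMuteReason,
} from '@webex/media-helpers';

async function demoServerMute() {
  const mic = await createMicrophoneStream();

  // Fires only when setServerMuted() actually flips the mute state.
  mic.on(LocalMicrophoneStreamEventNames.ServerMuted, (muted: boolean, reason: ServerMuteReason) => {
    console.log(`microphone ${muted ? 'muted' : 'unmuted'} by server: ${reason}`);
  });

  mic.setUnmuteAllowed(false);
  console.log(mic.isUnmuteAllowed()); // false

  try {
    mic.setMuted(false);
  } catch {
    // setMuted(false) throws Error('Unmute is not allowed') while unmute is disallowed.
  }
}
```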
package/test/unit/spec/webrtc-core.js
CHANGED

@@ -1,115 +1,145 @@
 import {assert, expect} from '@webex/test-helper-chai';
 import sinon from 'sinon';
-import {
-
+import {
+  LocalCameraStream,
+  LocalMicrophoneStream,
+  LocalMicrophoneStreamEventNames,
+  LocalCameraStreamEventNames,
+  LocalDisplayStream,
+  LocalSystemAudioStream,
+  createCameraStream,
+  createMicrophoneStream,
+  createDisplayStream,
+  createDisplayStreamWithAudio,
+} from '../../../src/webrtc-core';
+import * as wcmeStreams from '@webex/internal-media-core';
 
 describe('media-helpers', () => {
   describe('webrtc-core', () => {
-
     const classesToTest = [
-      {
-
+      {
+        className: LocalCameraStream,
+        title: 'LocalCameraStream',
+        event: LocalCameraStreamEventNames,
+        createFn: createCameraStream,
+        spyFn: 'createCameraStream',
+      },
+      {
+        className: LocalMicrophoneStream,
+        title: 'LocalMicrophoneStream',
+        event: LocalMicrophoneStreamEventNames,
+        createFn: createMicrophoneStream,
+        spyFn: 'createMicrophoneStream',
+      },
     ];
 
     classesToTest.forEach(({className, title, event, createFn, spyFn}) =>
-
-
-
-
-
-
-
-
-
-
-
-        sinon.restore();
-      });
-
-      it('by default allows unmuting', async () => {
-        assert.equal(track.isUnmuteAllowed(), true);
-        await track.setMuted(false);
-      })
+      describe(title, () => {
+        const fakeStream = {
+          getStreams: sinon.stub().returns([
+            {
+              label: 'fake Stream',
+              id: 'fake Stream id',
+              enabled: true,
+            },
+          ]),
+        };
+        const stream = new className(fakeStream);
 
-
-
+        afterEach(() => {
+          sinon.restore();
+        });
 
-
-
-
-
+        it('by default allows unmuting', async () => {
+          assert.equal(stream.isUnmuteAllowed(), true);
+          await stream.setMuted(false);
+        });
 
-
-
+        it('rejects setMute(false) if unmute is not allowed', async () => {
+          stream.setUnmuteAllowed(false);
 
-
-
-
+          assert.equal(stream.isUnmuteAllowed(), false);
+          const fn = () => stream.setMuted(false);
+          expect(fn).to.throw(/Unmute is not allowed/);
+        });
 
-
+        it('resolves setMute(false) if unmute is allowed', async () => {
+          stream.setUnmuteAllowed(true);
 
-
-
+          assert.equal(stream.isUnmuteAllowed(), true);
+          await stream.setMuted(false);
         });
 
-
-
+        describe('#setServerMuted', () => {
+          afterEach(() => {
+            sinon.restore();
+          });
 
-
+          const checkSetServerMuted = async (startMute, setMute, expectedCalled) => {
+            await stream.setMuted(startMute);
 
-
-            track.on(event.ServerMuted, handler);
+            assert.equal(stream.muted, startMute);
 
-
+            const handler = sinon.fake();
+            stream.on(event.ServerMuted, handler);
 
-
-            if (expectedCalled) {
-              assert.calledOnceWithExactly(handler, {muted: setMute, reason: 'remotelyMuted'});
-            } else {
-              assert.notCalled(handler);
-            }
-          };
+            await stream.setServerMuted(setMute, 'remotelyMuted');
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            assert.equal(stream.muted, setMute);
+            if (expectedCalled) {
+              assert.calledOnceWithExactly(handler, {muted: setMute, reason: 'remotelyMuted'});
+            } else {
+              assert.notCalled(handler);
+            }
+          };
+
+          it('tests true to false', async () => {
+            await checkSetServerMuted(true, false, true);
+          });
+
+          it('tests false to true', async () => {
+            await checkSetServerMuted(false, true, true);
+          });
+
+          it('tests true to true', async () => {
+            await checkSetServerMuted(true, true, false);
+          });
 
-
-
-
-
-            const spy = sinon.stub(wcmetracks, spyFn).returns('something');
-            const result = createFn(constraints);
-
-            assert.equal(result, 'something');
-            assert.calledOnceWithExactly(spy, className, constraints);
+          it('tests false to false', async () => {
+            await checkSetServerMuted(false, false, false);
+          });
         });
-      });
 
-
-
+        describe('#wcmeCreateMicrophoneStream, #wcmeCreateCameraStream', () => {
+          it('checks creating Streams', async () => {
+            const constraints = {devideId: 'abc'};
+
+            const spy = sinon.stub(wcmeStreams, spyFn).returns('something');
+            const result = createFn(constraints);
 
-
-
-
-
-
-
+            assert.equal(result, 'something');
+            assert.calledOnceWithExactly(spy, className, constraints);
+          });
+        });
+      })
+    );
+
+    describe('createDisplayStream', () => {
+      it('checks createDisplayStream', async () => {
+        const spy = sinon.stub(wcmeStreams, 'createDisplayStream').returns('something');
+        const result = createDisplayStream();
+        assert.equal(result, 'something');
+        assert.calledOnceWithExactly(spy, LocalDisplayStream);
+      });
     });
 
+    describe('createDisplayStreamWithAudio', () => {
+      it('checks createDisplayStreamWithAudio', async () => {
+        const spy = sinon.stub(wcmeStreams, 'createDisplayStreamWithAudio').returns('something');
+        const result = createDisplayStreamWithAudio();
+        assert.equal(result, 'something');
+        assert.calledOnceWithExactly(spy, LocalDisplayStream, LocalSystemAudioStream);
+      });
+    });
   });
-
-});
+});
package/tsconfig.json
ADDED
package/dist/index.js
DELETED
@@ -1,68 +0,0 @@
-"use strict";
-
-var _Object$defineProperty = require("@babel/runtime-corejs2/core-js/object/define-property");
-_Object$defineProperty(exports, "__esModule", {
-  value: true
-});
-_Object$defineProperty(exports, "LocalCameraTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalCameraTrack;
-  }
-});
-_Object$defineProperty(exports, "LocalCameraTrackEvents", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalCameraTrackEvents;
-  }
-});
-_Object$defineProperty(exports, "LocalDisplayTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalDisplayTrack;
-  }
-});
-_Object$defineProperty(exports, "LocalMicrophoneTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalMicrophoneTrack;
-  }
-});
-_Object$defineProperty(exports, "LocalMicrophoneTrackEvents", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalMicrophoneTrackEvents;
-  }
-});
-_Object$defineProperty(exports, "LocalTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalTrack;
-  }
-});
-_Object$defineProperty(exports, "LocalTrackEvents", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.LocalTrackEvents;
-  }
-});
-_Object$defineProperty(exports, "createCameraTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.createCameraTrack;
-  }
-});
-_Object$defineProperty(exports, "createDisplayTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.createDisplayTrack;
-  }
-});
-_Object$defineProperty(exports, "createMicrophoneTrack", {
-  enumerable: true,
-  get: function get() {
-    return _webrtcCore.createMicrophoneTrack;
-  }
-});
-var _webrtcCore = require("./webrtc-core");
-//# sourceMappingURL=index.js.map
package/dist/index.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"names":[],"sources":["index.ts"],"sourcesContent":["export {\n LocalTrack,\n LocalDisplayTrack,\n LocalTrackEvents,\n type TrackMuteEvent,\n type ServerMuteReason,\n LocalMicrophoneTrackEvents,\n LocalCameraTrackEvents,\n LocalMicrophoneTrack,\n LocalCameraTrack,\n createMicrophoneTrack,\n createCameraTrack,\n createDisplayTrack,\n} from './webrtc-core';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA"}
package/dist/webrtc-core.js
DELETED
@@ -1,185 +0,0 @@
-"use strict";
-
-var _Reflect$construct = require("@babel/runtime-corejs2/core-js/reflect/construct");
-var _Object$defineProperty = require("@babel/runtime-corejs2/core-js/object/define-property");
-var _interopRequireDefault = require("@babel/runtime-corejs2/helpers/interopRequireDefault");
-_Object$defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.LocalCameraTrackEvents = exports.LocalCameraTrack = void 0;
-_Object$defineProperty(exports, "LocalDisplayTrack", {
-  enumerable: true,
-  get: function get() {
-    return _internalMediaCore.LocalDisplayTrack;
-  }
-});
-exports.LocalMicrophoneTrackEvents = exports.LocalMicrophoneTrack = void 0;
-_Object$defineProperty(exports, "LocalTrack", {
-  enumerable: true,
-  get: function get() {
-    return _internalMediaCore.LocalTrack;
-  }
-});
-_Object$defineProperty(exports, "LocalTrackEvents", {
-  enumerable: true,
-  get: function get() {
-    return _internalMediaCore.LocalTrackEvents;
-  }
-});
-exports.createMicrophoneTrack = exports.createDisplayTrack = exports.createCameraTrack = void 0;
-var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/classCallCheck"));
-var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/createClass"));
-var _assertThisInitialized2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/assertThisInitialized"));
-var _get2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/get"));
-var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/inherits"));
-var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/possibleConstructorReturn"));
-var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/getPrototypeOf"));
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/defineProperty"));
-var _internalMediaCore = require("@webex/internal-media-core");
-function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = (0, _getPrototypeOf2.default)(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = (0, _getPrototypeOf2.default)(this).constructor; result = _Reflect$construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return (0, _possibleConstructorReturn2.default)(this, result); }; }
-function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !_Reflect$construct) return false; if (_Reflect$construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(_Reflect$construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
-// server forced the client to be unmuted
-// these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed
-var LocalMicrophoneTrackEvents; // these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed
-exports.LocalMicrophoneTrackEvents = LocalMicrophoneTrackEvents;
-(function (LocalMicrophoneTrackEvents) {
-  LocalMicrophoneTrackEvents["ServerMuted"] = "muted:byServer";
-})(LocalMicrophoneTrackEvents || (exports.LocalMicrophoneTrackEvents = LocalMicrophoneTrackEvents = {}));
-var LocalCameraTrackEvents;
-exports.LocalCameraTrackEvents = LocalCameraTrackEvents;
-(function (LocalCameraTrackEvents) {
-  LocalCameraTrackEvents["ServerMuted"] = "muted:byServer";
-})(LocalCameraTrackEvents || (exports.LocalCameraTrackEvents = LocalCameraTrackEvents = {}));
-var LocalMicrophoneTrack = /*#__PURE__*/function (_WcmeLocalMicrophoneT) {
-  (0, _inherits2.default)(LocalMicrophoneTrack, _WcmeLocalMicrophoneT);
-  var _super = _createSuper(LocalMicrophoneTrack);
-  function LocalMicrophoneTrack() {
-    var _this;
-    (0, _classCallCheck2.default)(this, LocalMicrophoneTrack);
-    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
-      args[_key] = arguments[_key];
-    }
-    _this = _super.call.apply(_super, [this].concat(args));
-    (0, _defineProperty2.default)((0, _assertThisInitialized2.default)(_this), "unmuteAllowed", true);
-    return _this;
-  }
-  (0, _createClass2.default)(LocalMicrophoneTrack, [{
-    key: "setUnmuteAllowed",
-    value:
-    /**
-     * @internal
-     */
-    function setUnmuteAllowed(allowed) {
-      this.unmuteAllowed = allowed;
-    }
-
-    /**
-     * @returns true if user is allowed to unmute the track, false otherwise
-     */
-  }, {
-    key: "isUnmuteAllowed",
-    value: function isUnmuteAllowed() {
-      return this.unmuteAllowed;
-    }
-  }, {
-    key: "setMuted",
-    value: function setMuted(muted) {
-      if (!muted) {
-        if (!this.isUnmuteAllowed()) {
-          throw new Error('Unmute is not allowed');
-        }
-      }
-      return (0, _get2.default)((0, _getPrototypeOf2.default)(LocalMicrophoneTrack.prototype), "setMuted", this).call(this, muted);
-    }
-
-    /**
-     * @internal
-     */
-  }, {
-    key: "setServerMuted",
-    value: function setServerMuted(muted, reason) {
-      if (muted !== this.muted) {
-        this.setMuted(muted);
-        this.emit(LocalMicrophoneTrackEvents.ServerMuted, {
-          muted: muted,
-          reason: reason
-        });
-      }
-    }
-  }]);
-  return LocalMicrophoneTrack;
-}(_internalMediaCore.LocalMicrophoneTrack);
-exports.LocalMicrophoneTrack = LocalMicrophoneTrack;
-var LocalCameraTrack = /*#__PURE__*/function (_WcmeLocalCameraTrack) {
-  (0, _inherits2.default)(LocalCameraTrack, _WcmeLocalCameraTrack);
-  var _super2 = _createSuper(LocalCameraTrack);
-  function LocalCameraTrack() {
-    var _this2;
-    (0, _classCallCheck2.default)(this, LocalCameraTrack);
-    for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
-      args[_key2] = arguments[_key2];
-    }
-    _this2 = _super2.call.apply(_super2, [this].concat(args));
-    (0, _defineProperty2.default)((0, _assertThisInitialized2.default)(_this2), "unmuteAllowed", true);
-    return _this2;
-  }
-  (0, _createClass2.default)(LocalCameraTrack, [{
-    key: "setUnmuteAllowed",
-    value:
-    /**
-     * @internal
-     */
-    function setUnmuteAllowed(allowed) {
-      this.unmuteAllowed = allowed;
-    }
-
-    /**
-     * @returns true if user is allowed to unmute the track, false otherwise
-     */
-  }, {
-    key: "isUnmuteAllowed",
-    value: function isUnmuteAllowed() {
-      return this.unmuteAllowed;
-    }
-  }, {
-    key: "setMuted",
-    value: function setMuted(muted) {
-      if (!muted) {
-        if (!this.isUnmuteAllowed()) {
-          throw new Error('Unmute is not allowed');
-        }
-      }
-      return (0, _get2.default)((0, _getPrototypeOf2.default)(LocalCameraTrack.prototype), "setMuted", this).call(this, muted);
-    }
-
-    /**
-     * @internal
-     */
-  }, {
-    key: "setServerMuted",
-    value: function setServerMuted(muted, reason) {
-      if (muted !== this.muted) {
-        this.setMuted(muted);
-        this.emit(LocalCameraTrackEvents.ServerMuted, {
-          muted: muted,
-          reason: reason
-        });
-      }
-    }
-  }]);
-  return LocalCameraTrack;
-}(_internalMediaCore.LocalCameraTrack);
-exports.LocalCameraTrack = LocalCameraTrack;
-var createMicrophoneTrack = function createMicrophoneTrack(constraints) {
-  return (0, _internalMediaCore.createMicrophoneTrack)(LocalMicrophoneTrack, constraints);
-};
-exports.createMicrophoneTrack = createMicrophoneTrack;
-var createCameraTrack = function createCameraTrack(constraints) {
-  return (0, _internalMediaCore.createCameraTrack)(LocalCameraTrack, constraints);
-};
-exports.createCameraTrack = createCameraTrack;
-var createDisplayTrack = function createDisplayTrack() {
-  return (0, _internalMediaCore.createDisplayTrack)(_internalMediaCore.LocalDisplayTrack);
-};
-exports.createDisplayTrack = createDisplayTrack;
-//# sourceMappingURL=webrtc-core.js.map
package/dist/webrtc-core.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"names":["LocalMicrophoneTrackEvents","LocalCameraTrackEvents","LocalMicrophoneTrack","allowed","unmuteAllowed","muted","isUnmuteAllowed","Error","reason","setMuted","emit","ServerMuted","WcmeLocalMicrophoneTrack","LocalCameraTrack","WcmeLocalCameraTrack","createMicrophoneTrack","constraints","wcmeCreateMicrophoneTrack","createCameraTrack","wcmeCreateCameraTrack","createDisplayTrack","wcmeCreateDisplayTrack","LocalDisplayTrack"],"sources":["webrtc-core.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-misused-new */\n/* eslint-disable valid-jsdoc */\n/* eslint-disable require-jsdoc */\nimport {\n AudioDeviceConstraints,\n createCameraTrack as wcmeCreateCameraTrack,\n createDisplayTrack as wcmeCreateDisplayTrack,\n createMicrophoneTrack as wcmeCreateMicrophoneTrack,\n LocalDisplayTrack,\n LocalMicrophoneTrack as WcmeLocalMicrophoneTrack,\n LocalCameraTrack as WcmeLocalCameraTrack,\n VideoDeviceConstraints,\n} from '@webex/internal-media-core';\n\nexport {\n LocalTrack,\n LocalDisplayTrack,\n LocalTrackEvents,\n type TrackMuteEvent,\n} from '@webex/internal-media-core';\n\nexport type ServerMuteReason =\n | 'remotelyMuted' // other user has remotely muted us\n | 'clientRequestFailed' // client called setMuted() but server request failed\n | 'localUnmuteRequired'; // server forced the client to be unmuted\n\n// these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed\nexport enum LocalMicrophoneTrackEvents {\n ServerMuted = 'muted:byServer',\n}\n\n// these events are in addition to WCME events. This will be properly typed once webrtc-core event types inheritance is fixed\nexport enum LocalCameraTrackEvents {\n ServerMuted = 'muted:byServer',\n}\n\nexport class LocalMicrophoneTrack extends WcmeLocalMicrophoneTrack {\n private unmuteAllowed = true;\n\n /**\n * @internal\n */\n setUnmuteAllowed(allowed) {\n this.unmuteAllowed = allowed;\n }\n\n /**\n * @returns true if user is allowed to unmute the track, false otherwise\n */\n isUnmuteAllowed() {\n return this.unmuteAllowed;\n }\n\n setMuted(muted: boolean): void {\n if (!muted) {\n if (!this.isUnmuteAllowed()) {\n throw new Error('Unmute is not allowed');\n }\n }\n\n return super.setMuted(muted);\n }\n\n /**\n * @internal\n */\n setServerMuted(muted: boolean, reason: ServerMuteReason) {\n if (muted !== this.muted) {\n this.setMuted(muted);\n this.emit(LocalMicrophoneTrackEvents.ServerMuted, {muted, reason});\n }\n }\n}\n\nexport class LocalCameraTrack extends WcmeLocalCameraTrack {\n private unmuteAllowed = true;\n\n /**\n * @internal\n */\n setUnmuteAllowed(allowed) {\n this.unmuteAllowed = allowed;\n }\n\n /**\n * @returns true if user is allowed to unmute the track, false otherwise\n */\n isUnmuteAllowed() {\n return this.unmuteAllowed;\n }\n\n setMuted(muted: boolean): void {\n if (!muted) {\n if (!this.isUnmuteAllowed()) {\n throw new Error('Unmute is not allowed');\n }\n }\n\n return super.setMuted(muted);\n }\n\n /**\n * @internal\n */\n setServerMuted(muted: boolean, reason: ServerMuteReason) {\n if (muted !== this.muted) {\n this.setMuted(muted);\n this.emit(LocalCameraTrackEvents.ServerMuted, {muted, reason});\n }\n }\n}\n\nexport const createMicrophoneTrack = (constraints?: AudioDeviceConstraints) =>\n wcmeCreateMicrophoneTrack(LocalMicrophoneTrack, constraints);\n\nexport const createCameraTrack = (constraints?: VideoDeviceConstraints) =>\n wcmeCreateCameraTrack(LocalCameraTrack, constraints);\n\nexport const createDisplayTrack = () => 
wcmeCreateDisplayTrack(LocalDisplayTrack);\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGA;AASoC;AAAA;AAYT;AAE3B;AAAA,IACYA,0BAA0B,EAItC;AAAA;AAAA,WAJYA,0BAA0B;EAA1BA,0BAA0B;AAAA,GAA1BA,0BAA0B,0CAA1BA,0BAA0B;AAAA,IAK1BC,sBAAsB;AAAA;AAAA,WAAtBA,sBAAsB;EAAtBA,sBAAsB;AAAA,GAAtBA,sBAAsB,sCAAtBA,sBAAsB;AAAA,IAIrBC,oBAAoB;EAAA;EAAA;EAAA;IAAA;IAAA;IAAA;MAAA;IAAA;IAAA;IAAA,4FACP,IAAI;IAAA;EAAA;EAAA;IAAA;IAAA;IAE5B;AACF;AACA;IACE,0BAAiBC,OAAO,EAAE;MACxB,IAAI,CAACC,aAAa,GAAGD,OAAO;IAC9B;;IAEA;AACF;AACA;EAFE;IAAA;IAAA,OAGA,2BAAkB;MAChB,OAAO,IAAI,CAACC,aAAa;IAC3B;EAAC;IAAA;IAAA,OAED,kBAASC,KAAc,EAAQ;MAC7B,IAAI,CAACA,KAAK,EAAE;QACV,IAAI,CAAC,IAAI,CAACC,eAAe,EAAE,EAAE;UAC3B,MAAM,IAAIC,KAAK,CAAC,uBAAuB,CAAC;QAC1C;MACF;MAEA,sHAAsBF,KAAK;IAC7B;;IAEA;AACF;AACA;EAFE;IAAA;IAAA,OAGA,wBAAeA,KAAc,EAAEG,MAAwB,EAAE;MACvD,IAAIH,KAAK,KAAK,IAAI,CAACA,KAAK,EAAE;QACxB,IAAI,CAACI,QAAQ,CAACJ,KAAK,CAAC;QACpB,IAAI,CAACK,IAAI,CAACV,0BAA0B,CAACW,WAAW,EAAE;UAACN,KAAK,EAALA,KAAK;UAAEG,MAAM,EAANA;QAAM,CAAC,CAAC;MACpE;IACF;EAAC;EAAA;AAAA,EAnCuCI,uCAAwB;AAAA;AAAA,IAsCrDC,gBAAgB;EAAA;EAAA;EAAA;IAAA;IAAA;IAAA;MAAA;IAAA;IAAA;IAAA,6FACH,IAAI;IAAA;EAAA;EAAA;IAAA;IAAA;IAE5B;AACF;AACA;IACE,0BAAiBV,OAAO,EAAE;MACxB,IAAI,CAACC,aAAa,GAAGD,OAAO;IAC9B;;IAEA;AACF;AACA;EAFE;IAAA;IAAA,OAGA,2BAAkB;MAChB,OAAO,IAAI,CAACC,aAAa;IAC3B;EAAC;IAAA;IAAA,OAED,kBAASC,KAAc,EAAQ;MAC7B,IAAI,CAACA,KAAK,EAAE;QACV,IAAI,CAAC,IAAI,CAACC,eAAe,EAAE,EAAE;UAC3B,MAAM,IAAIC,KAAK,CAAC,uBAAuB,CAAC;QAC1C;MACF;MAEA,kHAAsBF,KAAK;IAC7B;;IAEA;AACF;AACA;EAFE;IAAA;IAAA,OAGA,wBAAeA,KAAc,EAAEG,MAAwB,EAAE;MACvD,IAAIH,KAAK,KAAK,IAAI,CAACA,KAAK,EAAE;QACxB,IAAI,CAACI,QAAQ,CAACJ,KAAK,CAAC;QACpB,IAAI,CAACK,IAAI,CAACT,sBAAsB,CAACU,WAAW,EAAE;UAACN,KAAK,EAALA,KAAK;UAAEG,MAAM,EAANA;QAAM,CAAC,CAAC;MAChE;IACF;EAAC;EAAA;AAAA,EAnCmCM,mCAAoB;AAAA;AAsCnD,IAAMC,qBAAqB,GAAG,SAAxBA,qBAAqB,CAAIC,WAAoC;EAAA,OACxE,IAAAC,wCAAyB,EAACf,oBAAoB,EAAEc,WAAW,CAAC;AAAA;AAAC;AAExD,IAAME,iBAAiB,GAAG,SAApBA,iBAAiB,CAAIF,WAAoC;EAAA,OACpE,IAAAG,oCAAqB,EAACN,gBAAgB,EAAEG,WAAW,CAAC;AAAA;AAAC;AAEhD,IAAMI,kBAAkB,GAAG,SAArBA,kBAAkB;EAAA,OAAS,IAAAC,qCAAsB,EAACC,oCAAiB,CAAC;AAAA;AAAC"}