@signalapp/ringrtc 2.50.5 → 2.51.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/acknowledgments.md +1 -1
- package/dist/index.d.ts +1 -2
- package/dist/index.js +3 -8
- package/dist/ringrtc/Service.d.ts +48 -26
- package/dist/ringrtc/Service.js +94 -161
- package/package.json +2 -4
- package/dist/ringrtc/VideoSupport.d.ts +0 -77
- package/dist/ringrtc/VideoSupport.js +0 -433
package/dist/acknowledgments.md
CHANGED
@@ -669,7 +669,7 @@ For more information on this, and how to apply and follow the GNU AGPL, see
 
 ```
 
-## libsignal-account-keys 0.1.0, libsignal-core 0.1.0, mrp 2.
+## libsignal-account-keys 0.1.0, libsignal-core 0.1.0, mrp 2.51.0, protobuf 2.51.0, ringrtc 2.51.0, regex-aot 0.1.0, partial-default-derive 0.1.0
 
 ```
 GNU AFFERO GENERAL PUBLIC LICENSE
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,4 @@
 import { RingRTCType } from './ringrtc/Service';
-export { AnswerMessage, AudioDevice, DataMode, BusyMessage, Call, CallEndedReason, CallId, CallLogLevel, CallMessageUrgency, CallSettings, CallState, CallingMessage, ConnectionState, DeviceId, GroupCall, GroupCallEndReason, GroupCallKind, GroupCallObserver, GroupMemberInfo, HangupMessage, HangupType, HttpMethod, HttpResult, IceCandidateMessage, JoinState, LocalDeviceState, OfferMessage, OfferType, OpaqueMessage, PeekDeviceInfo, PeekInfo, PeekStatusCodes, Reaction, RemoteDeviceState, RingCancelReason, RingRTCType, RingUpdate, SpeechEvent, UserId,
-export { CanvasVideoRenderer, GumVideoCapturer, VideoFrameSource, MAX_VIDEO_CAPTURE_AREA, MAX_VIDEO_CAPTURE_BUFFER_SIZE, MAX_VIDEO_CAPTURE_HEIGHT, MAX_VIDEO_CAPTURE_WIDTH, } from './ringrtc/VideoSupport';
+export { AnswerMessage, AudioDevice, DataMode, BusyMessage, Call, CallEndedReason, CallId, CallLogLevel, CallMessageUrgency, CallSettings, CallState, CallingMessage, ConnectionState, DeviceId, GroupCall, GroupCallEndReason, GroupCallKind, GroupCallObserver, GroupMemberInfo, HangupMessage, HangupType, HttpMethod, HttpResult, IceCandidateMessage, JoinState, LocalDeviceState, OfferMessage, OfferType, OpaqueMessage, PeekDeviceInfo, PeekInfo, PeekStatusCodes, Reaction, RemoteDeviceState, RingCancelReason, RingRTCType, RingUpdate, SpeechEvent, UserId, VideoFrameSender, VideoFrameSource, VideoPixelFormatEnum, videoPixelFormatToEnum, VideoRequest, callIdFromEra, callIdFromRingId, } from './ringrtc/Service';
 export { CallLinkRootKey, CallLinkRestrictions, CallLinkState, } from './ringrtc/CallLinks';
 export declare const RingRTC: RingRTCType;
package/dist/index.js
CHANGED
@@ -4,7 +4,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 //
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.RingRTC = exports.CallLinkState = exports.CallLinkRestrictions = exports.CallLinkRootKey = exports.
+exports.RingRTC = exports.CallLinkState = exports.CallLinkRestrictions = exports.CallLinkRootKey = exports.callIdFromRingId = exports.callIdFromEra = exports.VideoRequest = exports.videoPixelFormatToEnum = exports.VideoPixelFormatEnum = exports.SpeechEvent = exports.RingUpdate = exports.RingRTCType = exports.RingCancelReason = exports.RemoteDeviceState = exports.PeekStatusCodes = exports.OpaqueMessage = exports.OfferType = exports.OfferMessage = exports.LocalDeviceState = exports.JoinState = exports.IceCandidateMessage = exports.HttpMethod = exports.HangupType = exports.HangupMessage = exports.GroupMemberInfo = exports.GroupCallKind = exports.GroupCallEndReason = exports.GroupCall = exports.ConnectionState = exports.CallingMessage = exports.CallState = exports.CallMessageUrgency = exports.CallLogLevel = exports.CallEndedReason = exports.Call = exports.BusyMessage = exports.DataMode = exports.AnswerMessage = void 0;
 const Service_1 = require("./ringrtc/Service");
 var Service_2 = require("./ringrtc/Service");
 Object.defineProperty(exports, "AnswerMessage", { enumerable: true, get: function () { return Service_2.AnswerMessage; } });

@@ -36,16 +36,11 @@ Object.defineProperty(exports, "RingCancelReason", { enumerable: true, get: func
 Object.defineProperty(exports, "RingRTCType", { enumerable: true, get: function () { return Service_2.RingRTCType; } });
 Object.defineProperty(exports, "RingUpdate", { enumerable: true, get: function () { return Service_2.RingUpdate; } });
 Object.defineProperty(exports, "SpeechEvent", { enumerable: true, get: function () { return Service_2.SpeechEvent; } });
+Object.defineProperty(exports, "VideoPixelFormatEnum", { enumerable: true, get: function () { return Service_2.VideoPixelFormatEnum; } });
+Object.defineProperty(exports, "videoPixelFormatToEnum", { enumerable: true, get: function () { return Service_2.videoPixelFormatToEnum; } });
 Object.defineProperty(exports, "VideoRequest", { enumerable: true, get: function () { return Service_2.VideoRequest; } });
 Object.defineProperty(exports, "callIdFromEra", { enumerable: true, get: function () { return Service_2.callIdFromEra; } });
 Object.defineProperty(exports, "callIdFromRingId", { enumerable: true, get: function () { return Service_2.callIdFromRingId; } });
-var VideoSupport_1 = require("./ringrtc/VideoSupport");
-Object.defineProperty(exports, "CanvasVideoRenderer", { enumerable: true, get: function () { return VideoSupport_1.CanvasVideoRenderer; } });
-Object.defineProperty(exports, "GumVideoCapturer", { enumerable: true, get: function () { return VideoSupport_1.GumVideoCapturer; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_AREA", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_AREA; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_BUFFER_SIZE", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_BUFFER_SIZE; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_HEIGHT", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_HEIGHT; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_WIDTH", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_WIDTH; } });
 var CallLinks_1 = require("./ringrtc/CallLinks");
 Object.defineProperty(exports, "CallLinkRootKey", { enumerable: true, get: function () { return CallLinks_1.CallLinkRootKey; } });
 Object.defineProperty(exports, "CallLinkRestrictions", { enumerable: true, get: function () { return CallLinks_1.CallLinkRestrictions; } });
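Note: the re-export changes above mean that CanvasVideoRenderer, GumVideoCapturer, and the MAX_VIDEO_CAPTURE_* constants are no longer available from the package root, while the frame-level types now come from ./ringrtc/Service. A minimal, purely illustrative sketch of the import change an application would make when upgrading:

```ts
// Before (2.50.5): capture/render helpers came from the package root.
// import { GumVideoCapturer, CanvasVideoRenderer } from '@signalapp/ringrtc';

// After (2.51.0): only the frame-level types remain; applications own
// capture and rendering themselves.
import {
  VideoFrameSender,
  VideoFrameSource,
  VideoPixelFormatEnum,
  videoPixelFormatToEnum,
} from '@signalapp/ringrtc';
```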
package/dist/ringrtc/Service.d.ts
CHANGED

@@ -1,4 +1,3 @@
-import { GumVideoCaptureOptions, VideoPixelFormatEnum } from './VideoSupport';
 import { CallLinkState, CallLinkRestrictions, CallLinkRootKey } from './CallLinks';
 export declare const callIdFromEra: (era: string) => CallId;
 export declare function callIdFromRingId(ringId: bigint): CallId;

@@ -270,15 +269,10 @@ export declare class RingRTCType {
     sendCallMessageToGroup(groupId: Buffer, message: Buffer, urgency: CallMessageUrgency, overrideRecipients: Array<Buffer>): void;
     get call(): Call | null;
     getCall(callId: CallId): Call | null;
-    accept(callId: CallId
+    accept(callId: CallId): void;
     decline(callId: CallId): void;
     ignore(callId: CallId): void;
     hangup(callId: CallId): void;
-    setOutgoingAudio(callId: CallId, enabled: boolean): void;
-    setOutgoingVideo(callId: CallId, enabled: boolean): void;
-    setOutgoingVideoIsScreenShare(callId: CallId, isScreenShare: boolean): void;
-    setVideoCapturer(callId: CallId, capturer: VideoCapturer | null): void;
-    setVideoRenderer(callId: CallId, renderer: VideoRenderer | null): void;
     getAudioInputs(): Array<AudioDevice>;
     setAudioInput(index: number): void;
     getAudioOutputs(): Array<AudioDevice>;

@@ -303,14 +297,50 @@ export interface AudioDevice {
     uniqueId: string;
     i18nKey?: string;
 }
-export
-
-
-
+export declare enum VideoPixelFormatEnum {
+    I420 = 0,
+    Nv12 = 1,
+    Rgba = 2
+}
+export declare function videoPixelFormatToEnum(format: VideoPixelFormat): VideoPixelFormatEnum | undefined;
+/**
+ * Interface for sending video frames to the RingRTC library.
+ *
+ * VideoFrameSender is used to transmit video frames (from a camera or screen share) over
+ * RTP via the RingRTC library.
+ */
+export interface VideoFrameSender {
+    /**
+     * Sends a video frame to be transmitted via RingRTC.
+     *
+     * @param width - The width of the video frame in pixels
+     * @param height - The height of the video frame in pixels
+     * @param format - The pixel format of the video data
+     * @param buffer - Buffer containing the raw video frame data
+     */
+    sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
 }
-
-
-
+/**
+ * Interface for retrieving received video frames from the RingRTC library.
+ */
+export interface VideoFrameSource {
+    /**
+     * Copies the latest frame into `buffer`.
+     *
+     * Note that `maxWidth` and `maxHeight` specify maximum dimensions, but allow for rotation,
+     * i.e. a maximum of 1920x1080 will also allow portrait-mode 1080x1920.
+     *
+     * @param buffer - The destination buffer where the frame will be copied
+     * @param maxWidth - Maximum width of the frame to receive
+     * @param maxHeight - Maximum height of the frame to receive
+     * @returns
+     * A tuple of [width, height] of the received frame, containing:
+     * - The width in pixels of the received frame
+     * - The height in pixels of the received frame
+     *
+     * Returns undefined if no new frame is available
+     */
+    receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
 }
 export declare class Call {
     private readonly _callManager;

@@ -319,6 +349,7 @@ export declare class Call {
     private readonly _isIncoming;
     private readonly _isVideoCall;
     private _state;
+    private _mediaSessionStarted;
     private _outgoingAudioEnabled;
     private _outgoingVideoEnabled;
     private _outgoingVideoIsScreenShare;

@@ -328,8 +359,6 @@ export declare class Call {
     remoteAudioLevel: NormalizedAudioLevel;
     remoteSharingScreen: boolean;
     networkRoute: NetworkRoute;
-    private _videoCapturer;
-    private _videoRenderer;
     endedReason?: CallEndedReason;
     handleStateChanged?: () => void;
     handleRemoteAudioEnabled?: () => void;

@@ -355,27 +384,20 @@ export declare class Call {
     get state(): CallState;
     set state(state: CallState);
     setCallEnded(): void;
-    set videoCapturer(capturer: VideoCapturer | null);
-    set videoRenderer(renderer: VideoRenderer | null);
     accept(): void;
     decline(): void;
     ignore(): void;
     hangup(): void;
-    get outgoingAudioEnabled(): boolean;
-    set outgoingAudioEnabled(enabled: boolean);
-    get outgoingVideoEnabled(): boolean;
-    set outgoingVideoEnabled(enabled: boolean);
-    set outgoingVideoIsScreenShare(isScreenShare: boolean);
     get remoteAudioEnabled(): boolean;
     set remoteAudioEnabled(enabled: boolean);
     get remoteVideoEnabled(): boolean;
     set remoteVideoEnabled(enabled: boolean);
+    setOutgoingAudioMuted(muted: boolean): void;
+    setOutgoingVideoMuted(muted: boolean): void;
+    setOutgoingVideoIsScreenShare(isScreenShare: boolean): void;
     sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
     receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
-    private enableOrDisableCapturer;
-    private setOutgoingVideoEnabled;
     updateDataMode(dataMode: DataMode): void;
-    private enableOrDisableRenderer;
 }
 export type GroupCallClientId = number;
 export declare enum ConnectionState {
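Note: with the declarations above, Call itself acts as both a VideoFrameSender and a VideoFrameSource, and CanvasVideoRenderer is no longer shipped, so drawing received frames becomes the application's job. A minimal sketch, assuming RGBA frames and an application-chosen size cap (the drawLatestFrame helper and the 1920x1080 limit are illustrative, not part of the package):

```ts
import { VideoFrameSource } from '@signalapp/ringrtc';

// Illustrative only: poll a VideoFrameSource (e.g. a Call, or a group-call
// remote device's source) and draw the latest RGBA frame onto a canvas.
const MAX_WIDTH = 1920;  // assumption: app-chosen cap, not a library constant
const MAX_HEIGHT = 1080;
const frameBuffer = Buffer.alloc(MAX_WIDTH * MAX_HEIGHT * 4); // 4 bytes per RGBA pixel

function drawLatestFrame(source: VideoFrameSource, canvas: HTMLCanvasElement): void {
  const frame = source.receiveVideoFrame(frameBuffer, MAX_WIDTH, MAX_HEIGHT);
  if (!frame) {
    return; // no new frame since the last call
  }
  const [width, height] = frame;
  const context = canvas.getContext('2d');
  if (!context) {
    return;
  }
  canvas.width = width;
  canvas.height = height;
  const imageData = new ImageData(width, height);
  imageData.data.set(frameBuffer.subarray(0, width * height * 4));
  context.putImageData(imageData, 0, 0);
}
```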
package/dist/ringrtc/Service.js
CHANGED
@@ -7,8 +7,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.CallLogLevel = exports.CallEndedReason = exports.CallState = exports.RingCancelReason = exports.DataMode = exports.HangupType = exports.OpaqueMessage = exports.HangupMessage = exports.BusyMessage = exports.IceCandidateMessage = exports.AnswerMessage = exports.OfferType = exports.OfferMessage = exports.CallingMessage = exports.GroupCall = exports.GroupCallKind = exports.VideoRequest = exports.GroupMemberInfo = exports.RemoteDeviceState = exports.LocalDeviceState = exports.HttpMethod = exports.RingUpdate = exports.CallMessageUrgency = exports.SpeechEvent = exports.GroupCallEndReason = exports.JoinState = exports.ConnectionState = exports.Call = exports.RingRTCType = exports.ReceivedAudioLevel = exports.NetworkRoute = exports.PeekStatusCodes = exports.callIdFromEra = void 0;
+exports.CallLogLevel = exports.CallEndedReason = exports.CallState = exports.RingCancelReason = exports.DataMode = exports.HangupType = exports.OpaqueMessage = exports.HangupMessage = exports.BusyMessage = exports.IceCandidateMessage = exports.AnswerMessage = exports.OfferType = exports.OfferMessage = exports.CallingMessage = exports.GroupCall = exports.GroupCallKind = exports.VideoRequest = exports.GroupMemberInfo = exports.RemoteDeviceState = exports.LocalDeviceState = exports.HttpMethod = exports.RingUpdate = exports.CallMessageUrgency = exports.SpeechEvent = exports.GroupCallEndReason = exports.JoinState = exports.ConnectionState = exports.Call = exports.VideoPixelFormatEnum = exports.RingRTCType = exports.ReceivedAudioLevel = exports.NetworkRoute = exports.PeekStatusCodes = exports.callIdFromEra = void 0;
 exports.callIdFromRingId = callIdFromRingId;
+exports.videoPixelFormatToEnum = videoPixelFormatToEnum;
+/* eslint-disable max-classes-per-file */
 const CallLinks_1 = require("./CallLinks");
 const Native_1 = __importDefault(require("./Native"));
 const INVALID_CLIENT_ID = 0;

@@ -287,7 +289,6 @@ class RingRTCType {
         const isIncoming = false;
         const call = new Call(this.callManager, remoteUserId, callId, isIncoming, isVideoCall, CallState.Prering);
         this._call = call;
-        call.outgoingVideoEnabled = isVideoCall;
         return call;
     }
     // Called by UX

@@ -1111,14 +1112,12 @@ class RingRTCType {
         }
         return null;
     }
-    accept(callId
+    accept(callId) {
         const call = this.getCall(callId);
         if (!call) {
             return;
         }
         call.accept();
-        call.outgoingAudioEnabled = true;
-        call.outgoingVideoEnabled = asVideoCall;
     }
     decline(callId) {
         const call = this.getCall(callId);

@@ -1141,41 +1140,6 @@ class RingRTCType {
         }
         call.hangup();
     }
-    setOutgoingAudio(callId, enabled) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingAudioEnabled = enabled;
-    }
-    setOutgoingVideo(callId, enabled) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingVideoEnabled = enabled;
-    }
-    setOutgoingVideoIsScreenShare(callId, isScreenShare) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingVideoIsScreenShare = isScreenShare;
-    }
-    setVideoCapturer(callId, capturer) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.videoCapturer = capturer;
-    }
-    setVideoRenderer(callId, renderer) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.videoRenderer = renderer;
-    }
     getAudioInputs() {
         return this.callManager.getAudioInputs();
     }

@@ -1190,8 +1154,30 @@ class RingRTCType {
     }
 }
 exports.RingRTCType = RingRTCType;
+// Given a weird name to not conflict with WebCodec's VideoPixelFormat
+var VideoPixelFormatEnum;
+(function (VideoPixelFormatEnum) {
+    VideoPixelFormatEnum[VideoPixelFormatEnum["I420"] = 0] = "I420";
+    VideoPixelFormatEnum[VideoPixelFormatEnum["Nv12"] = 1] = "Nv12";
+    VideoPixelFormatEnum[VideoPixelFormatEnum["Rgba"] = 2] = "Rgba";
+})(VideoPixelFormatEnum || (exports.VideoPixelFormatEnum = VideoPixelFormatEnum = {}));
+function videoPixelFormatToEnum(format) {
+    switch (format) {
+        case 'I420': {
+            return VideoPixelFormatEnum.I420;
+        }
+        case 'NV12': {
+            return VideoPixelFormatEnum.Nv12;
+        }
+        case 'RGBA': {
+            return VideoPixelFormatEnum.Rgba;
+        }
+    }
+}
 class Call {
     constructor(callManager, remoteUserId, callId, isIncoming, isVideoCall, state) {
+        // Media state flags.
+        this._mediaSessionStarted = false;
         this._outgoingAudioEnabled = false;
         this._outgoingVideoEnabled = false;
         this._outgoingVideoIsScreenShare = false;

@@ -1201,8 +1187,6 @@ class Call {
         this.remoteAudioLevel = 0;
         this.remoteSharingScreen = false;
         this.networkRoute = new NetworkRoute();
-        this._videoCapturer = null;
-        this._videoRenderer = null;
         this._callManager = callManager;
         this._remoteUserId = remoteUserId;
         this.callId = callId;

@@ -1228,11 +1212,39 @@ class Call {
         }
         this._state = state;
         if (state === CallState.Accepted) {
-            //
-            this.
+            // We might have been in the reconnecting state and already started media.
+            if (!this._mediaSessionStarted) {
+                sillyDeadlockProtection(() => {
+                    if (this._outgoingAudioEnabled) {
+                        this._callManager.setOutgoingAudioEnabled(true);
+                    }
+                    if (this._outgoingVideoIsScreenShare) {
+                        this._callManager.setOutgoingVideoIsScreenShare(true);
+                        this._callManager.setOutgoingVideoEnabled(true);
+                    }
+                    else if (this._outgoingVideoEnabled) {
+                        this._callManager.setOutgoingVideoEnabled(true);
+                    }
+                });
+                this._mediaSessionStarted = true;
+            }
+        }
+        else if (state === CallState.Ended) {
+            if (this._mediaSessionStarted) {
+                sillyDeadlockProtection(() => {
+                    if (this._outgoingAudioEnabled) {
+                        this._callManager.setOutgoingAudioEnabled(false);
+                    }
+                    if (this._outgoingVideoEnabled) {
+                        this._callManager.setOutgoingVideoEnabled(false);
+                    }
+                });
+                this._outgoingAudioEnabled = false;
+                this._outgoingVideoEnabled = false;
+                this._outgoingVideoIsScreenShare = false;
+                this._mediaSessionStarted = false;
+            }
         }
-        this.enableOrDisableCapturer();
-        this.enableOrDisableRenderer();
         if (this.handleStateChanged) {
             this.handleStateChanged();
         }

@@ -1240,14 +1252,6 @@ class Call {
     setCallEnded() {
         this._state = CallState.Ended;
     }
-    set videoCapturer(capturer) {
-        this._videoCapturer = capturer;
-        this.enableOrDisableCapturer();
-    }
-    set videoRenderer(renderer) {
-        this._videoRenderer = renderer;
-        this.enableOrDisableRenderer();
-    }
     accept() {
         this._callManager.accept(this.callId);
     }

@@ -1258,43 +1262,10 @@ class Call {
         this._callManager.ignore(this.callId);
     }
     hangup() {
-        // This is a little faster than waiting for the
-        // change in call state to come back.
-        if (this._videoCapturer) {
-            this._videoCapturer.disable();
-        }
-        if (this._videoRenderer) {
-            this._videoRenderer.disable();
-        }
-        // This assumes we only have one active call.
         sillyDeadlockProtection(() => {
            this._callManager.hangup();
         });
     }
-    get outgoingAudioEnabled() {
-        return this._outgoingAudioEnabled;
-    }
-    set outgoingAudioEnabled(enabled) {
-        this._outgoingAudioEnabled = enabled;
-        // This assumes we only have one active call.
-        sillyDeadlockProtection(() => {
-            this._callManager.setOutgoingAudioEnabled(enabled);
-        });
-    }
-    get outgoingVideoEnabled() {
-        return this._outgoingVideoEnabled;
-    }
-    set outgoingVideoEnabled(enabled) {
-        this._outgoingVideoEnabled = enabled;
-        this.enableOrDisableCapturer();
-    }
-    set outgoingVideoIsScreenShare(isScreenShare) {
-        // This assumes we only have one active call.
-        this._outgoingVideoIsScreenShare = isScreenShare;
-        sillyDeadlockProtection(() => {
-            this._callManager.setOutgoingVideoIsScreenShare(isScreenShare);
-        });
-    }
     get remoteAudioEnabled() {
         return this._remoteAudioEnabled;
     }

@@ -1306,63 +1277,51 @@ class Call {
     }
     set remoteVideoEnabled(enabled) {
         this._remoteVideoEnabled = enabled;
-
+    }
+    setOutgoingAudioMuted(muted) {
+        const enabled = !muted;
+        if (this._mediaSessionStarted && this._outgoingAudioEnabled !== enabled) {
+            this._outgoingAudioEnabled = enabled;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingAudioEnabled(enabled);
+            });
+        }
+        else {
+            this._outgoingAudioEnabled = enabled;
+        }
+    }
+    setOutgoingVideoMuted(muted) {
+        const enabled = !muted;
+        if (this._mediaSessionStarted && this._outgoingVideoEnabled !== enabled) {
+            this._outgoingVideoEnabled = enabled;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingVideoEnabled(enabled);
+            });
+        }
+        else {
+            this._outgoingVideoEnabled = enabled;
+        }
+    }
+    setOutgoingVideoIsScreenShare(isScreenShare) {
+        if (this._mediaSessionStarted &&
+            this._outgoingVideoIsScreenShare !== isScreenShare) {
+            this._outgoingVideoIsScreenShare = isScreenShare;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingVideoIsScreenShare(isScreenShare);
+            });
+        }
+        else {
+            this._outgoingVideoIsScreenShare = isScreenShare;
+        }
     }
     // With this method, a Call is a VideoFrameSender
     sendVideoFrame(width, height, format, buffer) {
-        // This assumes we only have one active call.
         this._callManager.sendVideoFrame(width, height, format, buffer);
     }
     // With this method, a Call is a VideoFrameSource
     receiveVideoFrame(buffer, maxWidth, maxHeight) {
-        // This assumes we only have one active call.
         return this._callManager.receiveVideoFrame(buffer, maxWidth, maxHeight);
     }
-    enableOrDisableCapturer() {
-        if (!this._videoCapturer) {
-            return;
-        }
-        if (!this.outgoingVideoEnabled) {
-            this._videoCapturer.disable();
-            if (this.state === CallState.Accepted) {
-                this.setOutgoingVideoEnabled(false);
-            }
-            return;
-        }
-        switch (this.state) {
-            case CallState.Prering:
-            case CallState.Ringing:
-                this._videoCapturer.enableCapture();
-                break;
-            case CallState.Accepted:
-                this._videoCapturer.enableCaptureAndSend(this);
-                this.setOutgoingVideoEnabled(true);
-                if (this._outgoingVideoIsScreenShare) {
-                    // Make sure the status gets sent.
-                    this.outgoingVideoIsScreenShare = true;
-                }
-                break;
-            case CallState.Reconnecting:
-                this._videoCapturer.enableCaptureAndSend(this);
-                // Don't send status until we're reconnected.
-                break;
-            case CallState.Ended:
-                this._videoCapturer.disable();
-                break;
-            default:
-        }
-    }
-    setOutgoingVideoEnabled(enabled) {
-        sillyDeadlockProtection(() => {
-            try {
-                this._callManager.setOutgoingVideoEnabled(enabled);
-            }
-            catch {
-                // We may not have an active connection any more.
-                // In which case it doesn't matter
-            }
-        });
-    }
     updateDataMode(dataMode) {
         sillyDeadlockProtection(() => {
             try {

@@ -1374,29 +1333,6 @@ class Call {
         }
         });
     }
-    enableOrDisableRenderer() {
-        if (!this._videoRenderer) {
-            return;
-        }
-        if (!this.remoteVideoEnabled) {
-            this._videoRenderer.disable();
-            return;
-        }
-        switch (this.state) {
-            case CallState.Prering:
-            case CallState.Ringing:
-                this._videoRenderer.disable();
-                break;
-            case CallState.Accepted:
-            case CallState.Reconnecting:
-                this._videoRenderer.enable(this);
-                break;
-            case CallState.Ended:
-                this._videoRenderer.disable();
-                break;
-            default:
-        }
-    }
 }
 exports.Call = Call;
 // Represents the connection state to a media server for a group call.

@@ -1722,7 +1658,6 @@ class GroupCall {
     }
     // With this, a GroupCall is a VideoFrameSender
     sendVideoFrame(width, height, format, buffer) {
-        // This assumes we only have one active call.
         this._callManager.sendVideoFrame(width, height, format, buffer);
     }
     // With this, a GroupCall can provide a VideoFrameSource for each remote device.

@@ -1745,7 +1680,6 @@ class GroupCall {
     }
 }
 exports.GroupCall = GroupCall;
-// Implements VideoSource for use in CanvasVideoRenderer
 class GroupCallVideoFrameSource {
     constructor(callManager, groupCall, remoteDemuxId // Uint32
     ) {

@@ -1754,7 +1688,6 @@ class GroupCallVideoFrameSource {
         this._remoteDemuxId = remoteDemuxId;
     }
     receiveVideoFrame(buffer, maxWidth, maxHeight) {
-        // This assumes we only have one active call.
         const frame = this._callManager.receiveGroupCallVideoFrame(this._groupCall.clientId, this._remoteDemuxId, buffer, maxWidth, maxHeight);
         if (frame) {
             const [width, height] = frame;
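Note: where 2.50.x exposed outgoingAudioEnabled/outgoingVideoEnabled setters (and RingRTCType.setOutgoingAudio/setOutgoingVideo), 2.51.0 replaces them with explicit mute methods on Call, and the _mediaSessionStarted flag defers the underlying callManager calls until the call actually reaches the Accepted state. A minimal sketch of what a caller might now do (illustrative; the surrounding call/RingRTC wiring is assumed):

```ts
import { Call } from '@signalapp/ringrtc';

// Illustrative: toggle outgoing media with the 2.51.0 API.
// Before the call is accepted these calls only record intent; the state
// setter shown in the diff above pushes the recorded flags to the call
// manager once the media session starts.
function applyMuteState(call: Call, audioMuted: boolean, videoMuted: boolean): void {
  call.setOutgoingAudioMuted(audioMuted);
  call.setOutgoingVideoMuted(videoMuted);
}
```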
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@signalapp/ringrtc",
-  "version": "2.
+  "version": "2.51.0",
   "description": "Signal Messenger voice and video calling library.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

@@ -25,7 +25,7 @@
   },
   "config": {
     "prebuildUrl": "https://build-artifacts.signal.org/libraries/ringrtc-desktop-build-v${npm_package_version}.tar.gz",
-    "prebuildChecksum": "
+    "prebuildChecksum": "77621836d3b47b8a4a5ba81941b9bdf1f27e9b70187ca2b00e8e467d0292f137"
   },
   "author": "",
   "license": "AGPL-3.0-only",

@@ -36,11 +36,9 @@
   "devDependencies": {
     "@types/chai": "4.3.16",
     "@types/chai-as-promised": "^7.1.4",
-    "@types/dom-mediacapture-transform": "0.1.10",
     "@types/lodash": "^4.14.106",
     "@types/mocha": "10.0.9",
     "@types/node": "20.17.6",
-    "@types/offscreencanvas": "^2019.7.3",
     "@types/sinon-chai": "^3.2.12",
     "chai": "4.4.1",
     "chai-as-promised": "^7.1.1",
package/dist/ringrtc/VideoSupport.d.ts
REMOVED

@@ -1,77 +0,0 @@
-interface Ref<T> {
-    readonly current: T | null;
-}
-export declare enum VideoPixelFormatEnum {
-    I420 = 0,
-    Nv12 = 1,
-    Rgba = 2
-}
-export interface VideoFrameSource {
-    /**
-     * Copies the latest frame into `buffer`.
-     *
-     * Note that `maxWidth` and `maxHeight` specify maximum dimensions,
-     * but allow for rotation, i.e. a maximum of 1920x1080 will also allow
-     * portrait-mode 1080x1920.
-     *
-     * Returns a `[width, height]` pair for the resulting frame,
-     * or `undefined` if there's no new frame ready to be displayed.
-     */
-    receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
-}
-interface VideoFrameSender {
-    sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
-}
-export declare class GumVideoCaptureOptions {
-    maxWidth: number;
-    maxHeight: number;
-    maxFramerate: number;
-    preferredDeviceId?: string;
-    screenShareSourceId?: string;
-    mediaStream?: MediaStream;
-    onEnded?: () => void;
-}
-export declare class GumVideoCapturer {
-    private defaultCaptureOptions;
-    private localPreview?;
-    private captureOptions?;
-    private sender?;
-    private mediaStream?;
-    private spawnedSenderRunning;
-    private preferredDeviceId?;
-    private updateLocalPreviewIntervalId?;
-    constructor(defaultCaptureOptions: GumVideoCaptureOptions);
-    capturing(): boolean;
-    setLocalPreview(localPreview: Ref<HTMLVideoElement> | undefined): void;
-    enableCapture(options?: GumVideoCaptureOptions): Promise<void>;
-    enableCaptureAndSend(sender?: VideoFrameSender, options?: GumVideoCaptureOptions): Promise<void>;
-    disable(): void;
-    setPreferredDevice(deviceId: string): Promise<void>;
-    enumerateDevices(): Promise<Array<MediaDeviceInfo>>;
-    private getUserMedia;
-    private startCapturing;
-    private stopCapturing;
-    private startSending;
-    private spawnSender;
-    private stopSending;
-    private updateLocalPreviewSourceObject;
-}
-export declare const MAX_VIDEO_CAPTURE_WIDTH: number;
-export declare const MAX_VIDEO_CAPTURE_HEIGHT: number;
-export declare const MAX_VIDEO_CAPTURE_AREA: number;
-export declare const MAX_VIDEO_CAPTURE_BUFFER_SIZE: number;
-export declare class CanvasVideoRenderer {
-    private canvas?;
-    private buffer;
-    private imageData?;
-    private source?;
-    private rafId?;
-    constructor();
-    setCanvas(canvas: Ref<HTMLCanvasElement> | undefined): void;
-    enable(source: VideoFrameSource): void;
-    disable(): void;
-    private requestAnimationFrameCallback;
-    private renderBlack;
-    private renderVideoFrame;
-}
-export {};
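Note: with VideoSupport removed, the MAX_VIDEO_CAPTURE_* constants declared above are no longer exported, so applications that size their own frame buffers need local equivalents. A sketch of one way to reproduce them, using the concrete values from the removed VideoSupport.js shown in the next section; whether to keep exactly these caps is an application decision.

```ts
// Values mirrored from the removed VideoSupport.js; adjust to taste.
const MAX_VIDEO_CAPTURE_WIDTH = 2880;
const MAX_VIDEO_CAPTURE_HEIGHT = 1800;
const MAX_VIDEO_CAPTURE_AREA = MAX_VIDEO_CAPTURE_WIDTH * MAX_VIDEO_CAPTURE_HEIGHT;
// 4 bytes per pixel (RGBA) is the worst case the old renderer allocated for.
const MAX_VIDEO_CAPTURE_BUFFER_SIZE = MAX_VIDEO_CAPTURE_AREA * 4;
```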
package/dist/ringrtc/VideoSupport.js
REMOVED

@@ -1,433 +0,0 @@
-"use strict";
-//
-// Copyright 2019-2021 Signal Messenger, LLC
-// SPDX-License-Identifier: AGPL-3.0-only
-//
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CanvasVideoRenderer = exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE = exports.MAX_VIDEO_CAPTURE_AREA = exports.MAX_VIDEO_CAPTURE_HEIGHT = exports.MAX_VIDEO_CAPTURE_WIDTH = exports.GumVideoCapturer = exports.GumVideoCaptureOptions = exports.VideoPixelFormatEnum = void 0;
-const index_1 = require("../index");
-// Given a weird name to not conflict with WebCodec's VideoPixelFormat
-var VideoPixelFormatEnum;
-(function (VideoPixelFormatEnum) {
-    VideoPixelFormatEnum[VideoPixelFormatEnum["I420"] = 0] = "I420";
-    VideoPixelFormatEnum[VideoPixelFormatEnum["Nv12"] = 1] = "Nv12";
-    VideoPixelFormatEnum[VideoPixelFormatEnum["Rgba"] = 2] = "Rgba";
-})(VideoPixelFormatEnum || (exports.VideoPixelFormatEnum = VideoPixelFormatEnum = {}));
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function videoPixelFormatFromEnum(format) {
-    switch (format) {
-        case VideoPixelFormatEnum.I420: {
-            return 'I420';
-        }
-        case VideoPixelFormatEnum.Nv12: {
-            return 'NV12';
-        }
-        case VideoPixelFormatEnum.Rgba: {
-            return 'RGBA';
-        }
-    }
-}
-function videoPixelFormatToEnum(format) {
-    switch (format) {
-        case 'I420': {
-            return VideoPixelFormatEnum.I420;
-        }
-        case 'NV12': {
-            return VideoPixelFormatEnum.Nv12;
-        }
-        case 'RGBA': {
-            return VideoPixelFormatEnum.Rgba;
-        }
-    }
-}
-class GumVideoCaptureOptions {
-    constructor() {
-        this.maxWidth = 640;
-        this.maxHeight = 480;
-        this.maxFramerate = 30;
-    }
-}
-exports.GumVideoCaptureOptions = GumVideoCaptureOptions;
-class GumVideoCapturer {
-    constructor(defaultCaptureOptions) {
-        this.spawnedSenderRunning = false;
-        this.defaultCaptureOptions = defaultCaptureOptions;
-    }
-    capturing() {
-        return this.captureOptions != undefined;
-    }
-    setLocalPreview(localPreview) {
-        const oldLocalPreview = this.localPreview?.current;
-        if (oldLocalPreview) {
-            oldLocalPreview.srcObject = null;
-        }
-        this.localPreview = localPreview;
-        this.updateLocalPreviewSourceObject();
-        // This is a dumb hack around the fact that sometimes the
-        // this.localPreview.current is updated without a call
-        // to setLocalPreview, in which case the local preview
-        // won't be rendered.
-        if (this.updateLocalPreviewIntervalId != undefined) {
-            clearInterval(this.updateLocalPreviewIntervalId);
-        }
-        this.updateLocalPreviewIntervalId = setInterval(this.updateLocalPreviewSourceObject.bind(this), 1000);
-    }
-    async enableCapture(options) {
-        return this.startCapturing(options ?? this.defaultCaptureOptions);
-    }
-    async enableCaptureAndSend(sender, options) {
-        const startCapturingPromise = this.startCapturing(options ?? this.defaultCaptureOptions);
-        if (sender) {
-            this.startSending(sender);
-        }
-        // Bubble up the error.
-        return startCapturingPromise;
-    }
-    disable() {
-        this.stopCapturing();
-        this.stopSending();
-        if (this.updateLocalPreviewIntervalId != undefined) {
-            clearInterval(this.updateLocalPreviewIntervalId);
-        }
-        this.updateLocalPreviewIntervalId = undefined;
-    }
-    // eslint-disable-next-line @typescript-eslint/require-await
-    async setPreferredDevice(deviceId) {
-        this.preferredDeviceId = deviceId;
-        if (this.captureOptions) {
-            const { captureOptions, sender } = this;
-            this.disable();
-            // Bubble up the error if starting video failed.
-            return this.enableCaptureAndSend(sender, captureOptions);
-        }
-    }
-    async enumerateDevices() {
-        const devices = await window.navigator.mediaDevices.enumerateDevices();
-        const cameras = devices.filter(d => d.kind == 'videoinput');
-        return cameras;
-    }
-    async getUserMedia(options) {
-        // Return provided media stream
-        if (options.mediaStream) {
-            return options.mediaStream;
-        }
-        const constraints = {
-            audio: false,
-            video: {
-                deviceId: options.preferredDeviceId ?? this.preferredDeviceId,
-                width: {
-                    max: options.maxWidth,
-                    ideal: options.maxWidth,
-                },
-                height: {
-                    max: options.maxHeight,
-                    ideal: options.maxHeight,
-                },
-                frameRate: {
-                    max: options.maxFramerate,
-                    ideal: options.maxFramerate,
-                },
-            },
-        };
-        if (options.screenShareSourceId != undefined) {
-            constraints.video = {
-                mandatory: {
-                    chromeMediaSource: 'desktop',
-                    chromeMediaSourceId: options.screenShareSourceId,
-                    maxWidth: options.maxWidth,
-                    maxHeight: options.maxHeight,
-                    minFrameRate: 1,
-                    maxFrameRate: options.maxFramerate,
-                },
-            };
-        }
-        return window.navigator.mediaDevices.getUserMedia(constraints);
-    }
-    async startCapturing(options) {
-        if (this.capturing()) {
-            index_1.RingRTC.logWarn('startCapturing(): already capturing');
-            return;
-        }
-        index_1.RingRTC.logInfo(`startCapturing(): ${options.maxWidth}x${options.maxHeight}@${options.maxFramerate}`);
-        this.captureOptions = options;
-        try {
-            // If we start/stop/start, we may have concurrent calls to getUserMedia,
-            // which is what we want if we're switching from camera to screenshare.
-            // But we need to make sure we deal with the fact that things might be
-            // different after the await here.
-            const mediaStream = await this.getUserMedia(options);
-            // It's possible video was disabled, switched to screenshare, or
-            // switched to a different camera while awaiting a response, in
-            // which case we need to disable the camera we just accessed.
-            if (this.captureOptions != options) {
-                index_1.RingRTC.logWarn('startCapturing(): different state after getUserMedia()');
-                for (const track of mediaStream.getVideoTracks()) {
-                    // Make the light turn off faster
-                    track.stop();
-                }
-                return;
-            }
-            if (this.mediaStream !== undefined &&
-                this.mediaStream.getVideoTracks().length > 0) {
-                // We have a stream and track for the requested camera already. Stop
-                // the duplicate track that we just started.
-                index_1.RingRTC.logWarn('startCapturing(): dropping duplicate call to startCapturing');
-                for (const track of mediaStream.getVideoTracks()) {
-                    track.stop();
-                }
-                return;
-            }
-            this.mediaStream = mediaStream;
-            if (!this.spawnedSenderRunning &&
-                this.mediaStream != undefined &&
-                this.sender != undefined) {
-                this.spawnSender(this.mediaStream, this.sender);
-            }
-            this.updateLocalPreviewSourceObject();
-        }
-        catch (e) {
-            index_1.RingRTC.logError(`startCapturing(): ${e}`);
-            // It's possible video was disabled, switched to screenshare, or
-            // switched to a different camera while awaiting a response, in
-            // which case we should reset the captureOptions if we set them.
-            if (this.captureOptions == options) {
-                // We couldn't open the camera. Oh well.
-                this.captureOptions = undefined;
-            }
-            // Re-raise so that callers can surface this condition to the user.
-            throw e;
-        }
-    }
-    stopCapturing() {
-        if (!this.capturing()) {
-            index_1.RingRTC.logWarn('stopCapturing(): not capturing');
-            return;
-        }
-        index_1.RingRTC.logInfo('stopCapturing()');
-        this.captureOptions = undefined;
-        if (this.mediaStream) {
-            for (const track of this.mediaStream.getVideoTracks()) {
-                // Make the light turn off faster
-                track.stop();
-            }
-            this.mediaStream = undefined;
-        }
-        this.updateLocalPreviewSourceObject();
-    }
-    startSending(sender) {
-        if (this.sender === sender) {
-            return;
-        }
-        if (this.sender) {
-            // If we're replacing an existing sender, make sure we stop the
-            // current setInterval loop before starting another one.
-            this.stopSending();
-        }
-        this.sender = sender;
-        if (!this.spawnedSenderRunning && this.mediaStream != undefined) {
-            this.spawnSender(this.mediaStream, this.sender);
-        }
-    }
-    spawnSender(mediaStream, sender) {
-        const track = mediaStream.getVideoTracks()[0];
-        if (track == undefined || this.spawnedSenderRunning) {
-            return;
-        }
-        const captureOptions = this.captureOptions;
-        if (track.readyState === 'ended') {
-            this.stopCapturing();
-            index_1.RingRTC.logError('spawnSender(): Video track ended before spawning sender');
-            return;
-        }
-        const reader = new MediaStreamTrackProcessor({
-            track,
-        }).readable.getReader();
-        const buffer = Buffer.alloc(exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE);
-        this.spawnedSenderRunning = true;
-        // eslint-disable-next-line @typescript-eslint/no-floating-promises
-        (async () => {
-            try {
-                while (mediaStream == this.mediaStream) {
-                    const { done, value: frame } = await reader.read();
-                    if (done) {
-                        break;
-                    }
-                    if (!frame) {
-                        continue;
-                    }
-                    try {
-                        const format = videoPixelFormatToEnum(frame.format ?? 'I420');
-                        if (format == undefined) {
-                            index_1.RingRTC.logWarn(`Unsupported video frame format: ${frame.format}`);
-                            break;
-                        }
-                        const visibleRect = frame.visibleRect;
-                        if (!visibleRect) {
-                            continue;
-                        }
-                        await frame.copyTo(buffer);
-                        if (sender !== this.sender) {
-                            break;
-                        }
-                        sender.sendVideoFrame(visibleRect.width, visibleRect.height, format, buffer);
-                    }
-                    catch (e) {
-                        index_1.RingRTC.logError(`sendVideoFrame(): ${e}`);
-                    }
-                    finally {
-                        // This must be called for more frames to come.
-                        frame.close();
-                    }
-                }
-            }
-            catch (e) {
-                index_1.RingRTC.logError(`spawnSender(): ${e}`);
-            }
-            finally {
-                reader.releaseLock();
-                captureOptions?.onEnded?.();
-            }
-            this.spawnedSenderRunning = false;
-        })();
-    }
-    stopSending() {
-        // The spawned sender should stop
-        this.sender = undefined;
-    }
-    updateLocalPreviewSourceObject() {
-        if (!this.localPreview) {
-            return;
-        }
-        const localPreview = this.localPreview.current;
-        if (!localPreview) {
-            return;
-        }
-        const { mediaStream = null } = this;
-        if (localPreview.srcObject === mediaStream) {
-            return;
-        }
-        if (mediaStream && this.captureOptions) {
-            localPreview.srcObject = mediaStream;
-            if (localPreview.width === 0) {
-                localPreview.width = this.captureOptions.maxWidth;
-            }
-            if (localPreview.height === 0) {
-                localPreview.height = this.captureOptions.maxHeight;
-            }
-        }
-        else {
-            localPreview.srcObject = null;
-        }
-    }
-}
-exports.GumVideoCapturer = GumVideoCapturer;
-// We add 10% in each dimension to allow for things that are slightly wider or taller than 1080p.
-const MAX_VIDEO_CAPTURE_MULTIPLIER = 1.0;
-exports.MAX_VIDEO_CAPTURE_WIDTH = 2880 * MAX_VIDEO_CAPTURE_MULTIPLIER;
-exports.MAX_VIDEO_CAPTURE_HEIGHT = 1800 * MAX_VIDEO_CAPTURE_MULTIPLIER;
-exports.MAX_VIDEO_CAPTURE_AREA = exports.MAX_VIDEO_CAPTURE_WIDTH * exports.MAX_VIDEO_CAPTURE_HEIGHT;
-exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE = exports.MAX_VIDEO_CAPTURE_AREA * 4;
-class CanvasVideoRenderer {
-    constructor() {
-        this.buffer = Buffer.alloc(exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE);
-    }
-    setCanvas(canvas) {
-        this.canvas = canvas;
-    }
-    enable(source) {
-        if (this.source === source) {
-            return;
-        }
-        if (this.source) {
-            // If we're replacing an existing source, make sure we stop the
-            // current rAF loop before starting another one.
-            if (this.rafId) {
-                window.cancelAnimationFrame(this.rafId);
-            }
-        }
-        this.source = source;
-        this.requestAnimationFrameCallback();
-    }
-    disable() {
-        this.renderBlack();
-        this.source = undefined;
-        if (this.rafId) {
-            window.cancelAnimationFrame(this.rafId);
-        }
-    }
-    requestAnimationFrameCallback() {
-        this.renderVideoFrame();
-        this.rafId = window.requestAnimationFrame(this.requestAnimationFrameCallback.bind(this));
-    }
-    renderBlack() {
-        if (!this.canvas) {
-            return;
-        }
-        const canvas = this.canvas.current;
-        if (!canvas) {
-            return;
-        }
-        const context = canvas.getContext('2d');
-        if (!context) {
-            return;
-        }
-        context.fillStyle = 'black';
-        context.fillRect(0, 0, canvas.width, canvas.height);
-    }
-    renderVideoFrame() {
-        if (!this.source || !this.canvas) {
-            return;
-        }
-        const canvas = this.canvas.current;
-        if (!canvas) {
-            return;
-        }
-        const context = canvas.getContext('2d');
-        if (!context) {
-            return;
-        }
-        const frame = this.source.receiveVideoFrame(this.buffer, exports.MAX_VIDEO_CAPTURE_WIDTH, exports.MAX_VIDEO_CAPTURE_HEIGHT);
-        if (!frame) {
-            return;
-        }
-        const [width, height] = frame;
-        if (canvas.clientWidth <= 0 ||
-            width <= 0 ||
-            canvas.clientHeight <= 0 ||
-            height <= 0) {
-            return;
-        }
-        const frameAspectRatio = width / height;
-        const canvasAspectRatio = canvas.clientWidth / canvas.clientHeight;
-        let dx = 0;
-        let dy = 0;
-        if (frameAspectRatio > canvasAspectRatio) {
-            // Frame wider than view: We need bars at the top and bottom
-            canvas.width = width;
-            canvas.height = width / canvasAspectRatio;
-            dy = (canvas.height - height) / 2;
-        }
-        else if (frameAspectRatio < canvasAspectRatio) {
-            // Frame narrower than view: We need pillars on the sides
-            canvas.width = height * canvasAspectRatio;
-            canvas.height = height;
-            dx = (canvas.width - width) / 2;
-        }
-        else {
-            // Will stretch perfectly with no bars
-            canvas.width = width;
-            canvas.height = height;
-        }
-        if (dx > 0 || dy > 0) {
-            context.fillStyle = 'black';
-            context.fillRect(0, 0, canvas.width, canvas.height);
-        }
-        if (this.imageData?.width !== width || this.imageData?.height !== height) {
-            this.imageData = new ImageData(width, height);
-        }
-        this.imageData.data.set(this.buffer.subarray(0, width * height * 4));
-        context.putImageData(this.imageData, dx, dy);
-    }
-}
-exports.CanvasVideoRenderer = CanvasVideoRenderer;
-//# sourceMappingURL=VideoSupport.js.map
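Note: since GumVideoCapturer above is no longer shipped, applications that captured from a camera and fed frames into RingRTC need their own capture loop. A minimal sketch of the same idea the removed spawnSender implemented, using getUserMedia plus MediaStreamTrackProcessor (Chromium/Electron only, with the matching DOM typings) and the retained videoPixelFormatToEnum/VideoFrameSender API; the buffer size and constraints are illustrative choices, not library requirements.

```ts
import {
  VideoFrameSender,
  videoPixelFormatToEnum,
} from '@signalapp/ringrtc';

// Illustrative: capture one camera track and push each frame to a
// VideoFrameSender (for example the current Call). Mirrors the loop that the
// removed GumVideoCapturer.spawnSender ran internally.
async function captureAndSend(sender: VideoFrameSender): Promise<void> {
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: false,
    video: { width: { ideal: 640 }, height: { ideal: 480 } }, // assumed caps
  });
  const track = stream.getVideoTracks()[0];
  const reader = new MediaStreamTrackProcessor({ track }).readable.getReader();
  const buffer = Buffer.alloc(2880 * 1800 * 4); // worst-case RGBA frame, as in the old module

  for (;;) {
    const { done, value: frame } = await reader.read();
    if (done || !frame) {
      break;
    }
    try {
      const format = videoPixelFormatToEnum(frame.format ?? 'I420');
      const visibleRect = frame.visibleRect;
      if (format === undefined || !visibleRect) {
        continue; // unsupported format or nothing visible; skip this frame
      }
      await frame.copyTo(buffer);
      sender.sendVideoFrame(visibleRect.width, visibleRect.height, format, buffer);
    } finally {
      frame.close(); // required so further frames are delivered
    }
  }
  reader.releaseLock();
  track.stop();
}
```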