@signalapp/ringrtc 2.50.5 → 2.52.0
This diff shows the publicly available contents of the two package versions as published to the registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- package/dist/acknowledgments.md +1 -1
- package/dist/index.d.ts +1 -2
- package/dist/index.js +3 -8
- package/dist/ringrtc/Service.d.ts +58 -26
- package/dist/ringrtc/Service.js +132 -161
- package/package.json +2 -4
- package/dist/ringrtc/VideoSupport.d.ts +0 -77
- package/dist/ringrtc/VideoSupport.js +0 -433
package/dist/acknowledgments.md
CHANGED

@@ -669,7 +669,7 @@ For more information on this, and how to apply and follow the GNU AGPL, see
 
 ```
 
-## libsignal-account-keys 0.1.0, libsignal-core 0.1.0, mrp 2.
+## libsignal-account-keys 0.1.0, libsignal-core 0.1.0, mrp 2.52.0, protobuf 2.52.0, ringrtc 2.52.0, regex-aot 0.1.0, partial-default-derive 0.1.0
 
 ```
 GNU AFFERO GENERAL PUBLIC LICENSE
package/dist/index.d.ts
CHANGED

@@ -1,5 +1,4 @@
 import { RingRTCType } from './ringrtc/Service';
-export { AnswerMessage, AudioDevice, DataMode, BusyMessage, Call, CallEndedReason, CallId, CallLogLevel, CallMessageUrgency, CallSettings, CallState, CallingMessage, ConnectionState, DeviceId, GroupCall, GroupCallEndReason, GroupCallKind, GroupCallObserver, GroupMemberInfo, HangupMessage, HangupType, HttpMethod, HttpResult, IceCandidateMessage, JoinState, LocalDeviceState, OfferMessage, OfferType, OpaqueMessage, PeekDeviceInfo, PeekInfo, PeekStatusCodes, Reaction, RemoteDeviceState, RingCancelReason, RingRTCType, RingUpdate, SpeechEvent, UserId,
-export { CanvasVideoRenderer, GumVideoCapturer, VideoFrameSource, MAX_VIDEO_CAPTURE_AREA, MAX_VIDEO_CAPTURE_BUFFER_SIZE, MAX_VIDEO_CAPTURE_HEIGHT, MAX_VIDEO_CAPTURE_WIDTH, } from './ringrtc/VideoSupport';
+export { AnswerMessage, AudioDevice, DataMode, BusyMessage, Call, CallEndedReason, CallId, CallLogLevel, CallMessageUrgency, CallSettings, CallState, CallingMessage, ConnectionState, DeviceId, GroupCall, GroupCallEndReason, GroupCallKind, GroupCallObserver, GroupMemberInfo, HangupMessage, HangupType, HttpMethod, HttpResult, IceCandidateMessage, JoinState, LocalDeviceState, OfferMessage, OfferType, OpaqueMessage, PeekDeviceInfo, PeekInfo, PeekStatusCodes, Reaction, RemoteDeviceState, RingCancelReason, RingRTCType, RingUpdate, SpeechEvent, UserId, VideoFrameSender, VideoFrameSource, VideoPixelFormatEnum, videoPixelFormatToEnum, VideoRequest, callIdFromEra, callIdFromRingId, } from './ringrtc/Service';
 export { CallLinkRootKey, CallLinkRestrictions, CallLinkState, } from './ringrtc/CallLinks';
 export declare const RingRTC: RingRTCType;
package/dist/index.js
CHANGED

@@ -4,7 +4,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 //
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.RingRTC = exports.CallLinkState = exports.CallLinkRestrictions = exports.CallLinkRootKey = exports.
+exports.RingRTC = exports.CallLinkState = exports.CallLinkRestrictions = exports.CallLinkRootKey = exports.callIdFromRingId = exports.callIdFromEra = exports.VideoRequest = exports.videoPixelFormatToEnum = exports.VideoPixelFormatEnum = exports.SpeechEvent = exports.RingUpdate = exports.RingRTCType = exports.RingCancelReason = exports.RemoteDeviceState = exports.PeekStatusCodes = exports.OpaqueMessage = exports.OfferType = exports.OfferMessage = exports.LocalDeviceState = exports.JoinState = exports.IceCandidateMessage = exports.HttpMethod = exports.HangupType = exports.HangupMessage = exports.GroupMemberInfo = exports.GroupCallKind = exports.GroupCallEndReason = exports.GroupCall = exports.ConnectionState = exports.CallingMessage = exports.CallState = exports.CallMessageUrgency = exports.CallLogLevel = exports.CallEndedReason = exports.Call = exports.BusyMessage = exports.DataMode = exports.AnswerMessage = void 0;
 const Service_1 = require("./ringrtc/Service");
 var Service_2 = require("./ringrtc/Service");
 Object.defineProperty(exports, "AnswerMessage", { enumerable: true, get: function () { return Service_2.AnswerMessage; } });
@@ -36,16 +36,11 @@ Object.defineProperty(exports, "RingCancelReason", { enumerable: true, get: func
 Object.defineProperty(exports, "RingRTCType", { enumerable: true, get: function () { return Service_2.RingRTCType; } });
 Object.defineProperty(exports, "RingUpdate", { enumerable: true, get: function () { return Service_2.RingUpdate; } });
 Object.defineProperty(exports, "SpeechEvent", { enumerable: true, get: function () { return Service_2.SpeechEvent; } });
+Object.defineProperty(exports, "VideoPixelFormatEnum", { enumerable: true, get: function () { return Service_2.VideoPixelFormatEnum; } });
+Object.defineProperty(exports, "videoPixelFormatToEnum", { enumerable: true, get: function () { return Service_2.videoPixelFormatToEnum; } });
 Object.defineProperty(exports, "VideoRequest", { enumerable: true, get: function () { return Service_2.VideoRequest; } });
 Object.defineProperty(exports, "callIdFromEra", { enumerable: true, get: function () { return Service_2.callIdFromEra; } });
 Object.defineProperty(exports, "callIdFromRingId", { enumerable: true, get: function () { return Service_2.callIdFromRingId; } });
-var VideoSupport_1 = require("./ringrtc/VideoSupport");
-Object.defineProperty(exports, "CanvasVideoRenderer", { enumerable: true, get: function () { return VideoSupport_1.CanvasVideoRenderer; } });
-Object.defineProperty(exports, "GumVideoCapturer", { enumerable: true, get: function () { return VideoSupport_1.GumVideoCapturer; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_AREA", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_AREA; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_BUFFER_SIZE", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_BUFFER_SIZE; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_HEIGHT", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_HEIGHT; } });
-Object.defineProperty(exports, "MAX_VIDEO_CAPTURE_WIDTH", { enumerable: true, get: function () { return VideoSupport_1.MAX_VIDEO_CAPTURE_WIDTH; } });
 var CallLinks_1 = require("./ringrtc/CallLinks");
 Object.defineProperty(exports, "CallLinkRootKey", { enumerable: true, get: function () { return CallLinks_1.CallLinkRootKey; } });
 Object.defineProperty(exports, "CallLinkRestrictions", { enumerable: true, get: function () { return CallLinks_1.CallLinkRestrictions; } });
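The two hunks above are the visible surface of the headline change: ./ringrtc/VideoSupport is gone, so CanvasVideoRenderer, GumVideoCapturer, and the MAX_VIDEO_CAPTURE_* constants are no longer re-exported, while VideoFrameSender, VideoFrameSource, VideoPixelFormatEnum, and videoPixelFormatToEnum now come from ./ringrtc/Service. A hedged sketch of how a consumer's imports might change (only names exported in this diff are used; the surrounding application code is hypothetical):

```ts
// Before (2.50.5): renderer/capturer helpers came from the package root.
// import {
//   CanvasVideoRenderer,
//   GumVideoCapturer,
//   MAX_VIDEO_CAPTURE_HEIGHT,
//   MAX_VIDEO_CAPTURE_WIDTH,
// } from '@signalapp/ringrtc';

// After (2.52.0): only the frame-level interfaces remain; the embedding app brings
// its own capture/render glue (see the sketches later in this diff).
import {
  RingRTC,
  VideoFrameSender,
  VideoFrameSource,
  VideoPixelFormatEnum,
  videoPixelFormatToEnum,
} from '@signalapp/ringrtc';
```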
package/dist/ringrtc/Service.d.ts
CHANGED

@@ -1,4 +1,3 @@
-import { GumVideoCaptureOptions, VideoPixelFormatEnum } from './VideoSupport';
 import { CallLinkState, CallLinkRestrictions, CallLinkRootKey } from './CallLinks';
 export declare const callIdFromEra: (era: string) => CallId;
 export declare function callIdFromRingId(ringId: bigint): CallId;
@@ -248,6 +247,8 @@ export declare class RingRTCType {
     groupCallRingUpdate(groupId: GroupId, ringId: bigint, sender: GroupCallUserId, state: RingUpdate): void;
     handleRtcStatsReportComplete(reportJson: string): void;
     handleSpeechEvent(clientId: GroupCallClientId, event: SpeechEvent): void;
+    onRemoteMute(clientId: GroupCallClientId, demuxId: number): void;
+    onObservedRemoteMute(clientId: GroupCallClientId, sourceDemuxId: number, targetDemuxId: number): void;
     onLogMessage(level: number, fileName: string, line: number, message: string): void;
     logError(message: string): void;
     logWarn(message: string): void;
@@ -270,15 +271,10 @@ export declare class RingRTCType {
     sendCallMessageToGroup(groupId: Buffer, message: Buffer, urgency: CallMessageUrgency, overrideRecipients: Array<Buffer>): void;
     get call(): Call | null;
     getCall(callId: CallId): Call | null;
-    accept(callId: CallId
+    accept(callId: CallId): void;
     decline(callId: CallId): void;
     ignore(callId: CallId): void;
     hangup(callId: CallId): void;
-    setOutgoingAudio(callId: CallId, enabled: boolean): void;
-    setOutgoingVideo(callId: CallId, enabled: boolean): void;
-    setOutgoingVideoIsScreenShare(callId: CallId, isScreenShare: boolean): void;
-    setVideoCapturer(callId: CallId, capturer: VideoCapturer | null): void;
-    setVideoRenderer(callId: CallId, renderer: VideoRenderer | null): void;
     getAudioInputs(): Array<AudioDevice>;
     setAudioInput(index: number): void;
     getAudioOutputs(): Array<AudioDevice>;
@@ -303,14 +299,50 @@ export interface AudioDevice {
     uniqueId: string;
     i18nKey?: string;
 }
-export
-
-
-
+export declare enum VideoPixelFormatEnum {
+    I420 = 0,
+    Nv12 = 1,
+    Rgba = 2
+}
+export declare function videoPixelFormatToEnum(format: VideoPixelFormat): VideoPixelFormatEnum | undefined;
+/**
+ * Interface for sending video frames to the RingRTC library.
+ *
+ * VideoFrameSender is used to transmit video frames (from a camera or screen share) over
+ * RTP via the RingRTC library.
+ */
+export interface VideoFrameSender {
+    /**
+     * Sends a video frame to be transmitted via RingRTC.
+     *
+     * @param width - The width of the video frame in pixels
+     * @param height - The height of the video frame in pixels
+     * @param format - The pixel format of the video data
+     * @param buffer - Buffer containing the raw video frame data
+     */
+    sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
 }
-
-
-
+/**
+ * Interface for retrieving received video frames from the RingRTC library.
+ */
+export interface VideoFrameSource {
+    /**
+     * Copies the latest frame into `buffer`.
+     *
+     * Note that `maxWidth` and `maxHeight` specify maximum dimensions, but allow for rotation,
+     * i.e. a maximum of 1920x1080 will also allow portrait-mode 1080x1920.
+     *
+     * @param buffer - The destination buffer where the frame will be copied
+     * @param maxWidth - Maximum width of the frame to receive
+     * @param maxHeight - Maximum height of the frame to receive
+     * @returns
+     * A tuple of [width, height] of the received frame, containing:
+     * - The width in pixels of the received frame
+     * - The height in pixels of the received frame
+     *
+     * Returns undefined if no new frame is available
+     */
+    receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
 }
 export declare class Call {
     private readonly _callManager;
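With the VideoFrameSender and VideoFrameSource interfaces now declared here, and CanvasVideoRenderer removed from the package (see the deleted VideoSupport files at the end of this diff), painting received frames is up to the embedding app. A minimal sketch of consuming a VideoFrameSource, loosely following what the removed renderer did and assuming RGBA frame data; the helper name and buffer sizing are local assumptions:

```ts
import type { VideoFrameSource } from '@signalapp/ringrtc';

// Buffer large enough for the biggest frame the library will deliver; 2880 x 1800 x 4
// bytes/pixel matches the removed MAX_VIDEO_CAPTURE_BUFFER_SIZE constant.
const MAX_WIDTH = 2880;
const MAX_HEIGHT = 1800;
const frameBuffer = Buffer.alloc(MAX_WIDTH * MAX_HEIGHT * 4);

// Copies the latest frame (if any) from `source` onto `canvas`, assuming RGBA data,
// which is what the removed CanvasVideoRenderer assumed as well.
function drawLatestFrame(source: VideoFrameSource, canvas: HTMLCanvasElement): void {
  const frame = source.receiveVideoFrame(frameBuffer, MAX_WIDTH, MAX_HEIGHT);
  if (!frame) {
    return; // No new frame since the last call.
  }
  const [width, height] = frame;
  const context = canvas.getContext('2d');
  if (!context) {
    return;
  }
  canvas.width = width;
  canvas.height = height;
  const imageData = new ImageData(width, height);
  imageData.data.set(frameBuffer.subarray(0, width * height * 4));
  context.putImageData(imageData, 0, 0);
}
```

The removed renderer drove the equivalent of this from a requestAnimationFrame loop and letterboxed mismatched aspect ratios; both are omitted here for brevity.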
@@ -319,6 +351,7 @@ export declare class Call {
     private readonly _isIncoming;
     private readonly _isVideoCall;
     private _state;
+    private _mediaSessionStarted;
     private _outgoingAudioEnabled;
     private _outgoingVideoEnabled;
     private _outgoingVideoIsScreenShare;
@@ -328,8 +361,6 @@ export declare class Call {
     remoteAudioLevel: NormalizedAudioLevel;
     remoteSharingScreen: boolean;
     networkRoute: NetworkRoute;
-    private _videoCapturer;
-    private _videoRenderer;
     endedReason?: CallEndedReason;
     handleStateChanged?: () => void;
     handleRemoteAudioEnabled?: () => void;
@@ -355,27 +386,20 @@ export declare class Call {
     get state(): CallState;
     set state(state: CallState);
     setCallEnded(): void;
-    set videoCapturer(capturer: VideoCapturer | null);
-    set videoRenderer(renderer: VideoRenderer | null);
     accept(): void;
     decline(): void;
     ignore(): void;
     hangup(): void;
-    get outgoingAudioEnabled(): boolean;
-    set outgoingAudioEnabled(enabled: boolean);
-    get outgoingVideoEnabled(): boolean;
-    set outgoingVideoEnabled(enabled: boolean);
-    set outgoingVideoIsScreenShare(isScreenShare: boolean);
     get remoteAudioEnabled(): boolean;
     set remoteAudioEnabled(enabled: boolean);
     get remoteVideoEnabled(): boolean;
     set remoteVideoEnabled(enabled: boolean);
+    setOutgoingAudioMuted(muted: boolean): void;
+    setOutgoingVideoMuted(muted: boolean): void;
+    setOutgoingVideoIsScreenShare(isScreenShare: boolean): void;
     sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
     receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
-    private enableOrDisableCapturer;
-    private setOutgoingVideoEnabled;
     updateDataMode(dataMode: DataMode): void;
-    private enableOrDisableRenderer;
 }
 export type GroupCallClientId = number;
 export declare enum ConnectionState {
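Call loses its videoCapturer/videoRenderer setters and the outgoingAudioEnabled / outgoingVideoEnabled accessors; 1:1 calls now use the same muted-flag methods as GroupCall. A hedged before/after sketch (the call variable is assumed to come from RingRTC.call):

```ts
import { RingRTC } from '@signalapp/ringrtc';

const call = RingRTC.call;
if (call) {
  // 2.50.5 (removed):
  //   call.outgoingAudioEnabled = false;
  //   call.outgoingVideoEnabled = true;
  //   call.outgoingVideoIsScreenShare = false;

  // 2.52.0: muted flags instead of enabled flags, as method calls.
  call.setOutgoingAudioMuted(true);          // mute the microphone
  call.setOutgoingVideoMuted(false);         // send camera video
  call.setOutgoingVideoIsScreenShare(false); // camera, not screen share
}
```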
@@ -487,6 +511,8 @@ export interface GroupCallObserver {
     onPeekChanged(groupCall: GroupCall): void;
     onEnded(groupCall: GroupCall, reason: GroupCallEndReason): void;
     onSpeechEvent(groupCall: GroupCall, event: SpeechEvent): void;
+    onRemoteMute(groupCall: GroupCall, demuxId: number): void;
+    onObservedRemoteMute(groupCall: GroupCall, sourceDemuxId: number, targetDemuxId: number): void;
 }
 export declare class GroupCall {
     private readonly _kind;
@@ -508,6 +534,8 @@ export declare class GroupCall {
     getPeekInfo(): PeekInfo | undefined;
     getCallId(): CallId | undefined;
     setOutgoingAudioMuted(muted: boolean): void;
+    setOutgoingAudioMutedRemotely(source: number): void;
+    sendRemoteMuteRequest(target: number): void;
     react(value: string): void;
     raiseHand(raise: boolean): void;
     setOutgoingVideoMuted(muted: boolean): void;
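Group calls gain a remote-mute flow: a participant can ask another device to mute with sendRemoteMuteRequest(target), the muted side applies it with setOutgoingAudioMutedRemotely(source), and observers learn about it through the new onRemoteMute / onObservedRemoteMute callbacks. A hedged sketch of wiring the observer side (only the two new callbacks are shown; logging stands in for real UI updates):

```ts
import type { GroupCall, GroupCallObserver } from '@signalapp/ringrtc';

// A real GroupCallObserver implements the rest of the interface as well.
const remoteMuteHandlers: Pick<GroupCallObserver, 'onRemoteMute' | 'onObservedRemoteMute'> = {
  // This device was muted by another participant (identified by demux ID).
  onRemoteMute(_groupCall: GroupCall, demuxId: number): void {
    console.log(`muted remotely by demuxId=${demuxId}`);
  },
  // Participant `targetDemuxId` was muted by `sourceDemuxId` (observed, not us).
  onObservedRemoteMute(_groupCall: GroupCall, sourceDemuxId: number, targetDemuxId: number): void {
    console.log(`demuxId=${sourceDemuxId} muted demuxId=${targetDemuxId}`);
  },
};

// Asking another participant to mute, e.g. from a "mute them" control:
function requestMute(groupCall: GroupCall, targetDemuxId: number): void {
  groupCall.sendRemoteMuteRequest(targetDemuxId);
}
```

On the receiving side, the Service.js change later in this diff shows setOutgoingAudioMutedRemotely(source) flipping the local device's audioMuted flag before notifying the native layer and the observer.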
@@ -540,6 +568,8 @@ export declare class GroupCall {
     setRemoteAspectRatio(remoteDemuxId: number, aspectRatio: number): void;
     setRtcStatsInterval(intervalMillis: number): void;
     handleSpeechEvent(event: SpeechEvent): void;
+    onRemoteMute(demuxId: number): void;
+    onObservedRemoteMute(sourceDemuxId: number, targetDemuxId: number): void;
 }
 declare class GroupCallVideoFrameSource {
     private readonly _callManager;
@@ -648,6 +678,8 @@ export interface CallManager {
     leave(clientId: GroupCallClientId): void;
     disconnect(clientId: GroupCallClientId): void;
     setOutgoingAudioMuted(clientId: GroupCallClientId, muted: boolean): void;
+    setOutgoingAudioMutedRemotely(clientId: GroupCallClientId, source: number): void;
+    sendRemoteMuteRequest(clientId: GroupCallClientId, target: number): void;
     setOutgoingVideoMuted(clientId: GroupCallClientId, muted: boolean): void;
     setPresenting(clientId: GroupCallClientId, presenting: boolean): void;
     setOutgoingGroupCallVideoIsScreenShare(clientId: GroupCallClientId, isScreenShare: boolean): void;
package/dist/ringrtc/Service.js
CHANGED

@@ -7,8 +7,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.CallLogLevel = exports.CallEndedReason = exports.CallState = exports.RingCancelReason = exports.DataMode = exports.HangupType = exports.OpaqueMessage = exports.HangupMessage = exports.BusyMessage = exports.IceCandidateMessage = exports.AnswerMessage = exports.OfferType = exports.OfferMessage = exports.CallingMessage = exports.GroupCall = exports.GroupCallKind = exports.VideoRequest = exports.GroupMemberInfo = exports.RemoteDeviceState = exports.LocalDeviceState = exports.HttpMethod = exports.RingUpdate = exports.CallMessageUrgency = exports.SpeechEvent = exports.GroupCallEndReason = exports.JoinState = exports.ConnectionState = exports.Call = exports.RingRTCType = exports.ReceivedAudioLevel = exports.NetworkRoute = exports.PeekStatusCodes = exports.callIdFromEra = void 0;
+exports.CallLogLevel = exports.CallEndedReason = exports.CallState = exports.RingCancelReason = exports.DataMode = exports.HangupType = exports.OpaqueMessage = exports.HangupMessage = exports.BusyMessage = exports.IceCandidateMessage = exports.AnswerMessage = exports.OfferType = exports.OfferMessage = exports.CallingMessage = exports.GroupCall = exports.GroupCallKind = exports.VideoRequest = exports.GroupMemberInfo = exports.RemoteDeviceState = exports.LocalDeviceState = exports.HttpMethod = exports.RingUpdate = exports.CallMessageUrgency = exports.SpeechEvent = exports.GroupCallEndReason = exports.JoinState = exports.ConnectionState = exports.Call = exports.VideoPixelFormatEnum = exports.RingRTCType = exports.ReceivedAudioLevel = exports.NetworkRoute = exports.PeekStatusCodes = exports.callIdFromEra = void 0;
 exports.callIdFromRingId = callIdFromRingId;
+exports.videoPixelFormatToEnum = videoPixelFormatToEnum;
+/* eslint-disable max-classes-per-file */
 const CallLinks_1 = require("./CallLinks");
 const Native_1 = __importDefault(require("./Native"));
 const INVALID_CLIENT_ID = 0;
@@ -111,6 +113,10 @@ NativeCallManager.prototype.groupReact = Native_1.default.cm_groupReact;
 NativeCallManager.prototype.groupRaiseHand = Native_1.default.cm_groupRaiseHand;
 NativeCallManager.prototype.setOutgoingAudioMuted =
     Native_1.default.cm_setOutgoingAudioMuted;
+NativeCallManager.prototype.setOutgoingAudioMutedRemotely =
+    Native_1.default.cm_setOutgoingAudioMutedRemotely;
+NativeCallManager.prototype.sendRemoteMuteRequest =
+    Native_1.default.cm_sendRemoteMuteRequest;
 NativeCallManager.prototype.setOutgoingVideoMuted =
     Native_1.default.cm_setOutgoingVideoMuted;
 NativeCallManager.prototype.setOutgoingGroupCallVideoIsScreenShare =
@@ -287,7 +293,6 @@ class RingRTCType {
         const isIncoming = false;
         const call = new Call(this.callManager, remoteUserId, callId, isIncoming, isVideoCall, CallState.Prering);
         this._call = call;
-        call.outgoingVideoEnabled = isVideoCall;
         return call;
     }
     // Called by UX
@@ -972,6 +977,24 @@ class RingRTCType {
         });
     }
     // Called by Rust
+    onRemoteMute(clientId, demuxId) {
+        sillyDeadlockProtection(() => {
+            const groupCall = this._groupCallByClientId.get(clientId);
+            if (groupCall) {
+                groupCall.onRemoteMute(demuxId);
+            }
+        });
+    }
+    // Called by Rust.
+    onObservedRemoteMute(clientId, sourceDemuxId, targetDemuxId) {
+        sillyDeadlockProtection(() => {
+            const groupCall = this._groupCallByClientId.get(clientId);
+            if (groupCall) {
+                groupCall.onObservedRemoteMute(sourceDemuxId, targetDemuxId);
+            }
+        });
+    }
+    // Called by Rust
     onLogMessage(level, fileName, line, message) {
         if (this.handleLogMessage) {
             this.handleLogMessage(level, fileName, line, message);
@@ -1111,14 +1134,12 @@ class RingRTCType {
         }
         return null;
     }
-    accept(callId
+    accept(callId) {
         const call = this.getCall(callId);
         if (!call) {
             return;
         }
         call.accept();
-        call.outgoingAudioEnabled = true;
-        call.outgoingVideoEnabled = asVideoCall;
     }
     decline(callId) {
         const call = this.getCall(callId);
@@ -1141,41 +1162,6 @@ class RingRTCType {
         }
         call.hangup();
     }
-    setOutgoingAudio(callId, enabled) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingAudioEnabled = enabled;
-    }
-    setOutgoingVideo(callId, enabled) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingVideoEnabled = enabled;
-    }
-    setOutgoingVideoIsScreenShare(callId, isScreenShare) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.outgoingVideoIsScreenShare = isScreenShare;
-    }
-    setVideoCapturer(callId, capturer) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.videoCapturer = capturer;
-    }
-    setVideoRenderer(callId, renderer) {
-        const call = this.getCall(callId);
-        if (!call) {
-            return;
-        }
-        call.videoRenderer = renderer;
-    }
     getAudioInputs() {
         return this.callManager.getAudioInputs();
     }
@@ -1190,8 +1176,30 @@ class RingRTCType {
     }
 }
 exports.RingRTCType = RingRTCType;
+// Given a weird name to not conflict with WebCodec's VideoPixelFormat
+var VideoPixelFormatEnum;
+(function (VideoPixelFormatEnum) {
+    VideoPixelFormatEnum[VideoPixelFormatEnum["I420"] = 0] = "I420";
+    VideoPixelFormatEnum[VideoPixelFormatEnum["Nv12"] = 1] = "Nv12";
+    VideoPixelFormatEnum[VideoPixelFormatEnum["Rgba"] = 2] = "Rgba";
+})(VideoPixelFormatEnum || (exports.VideoPixelFormatEnum = VideoPixelFormatEnum = {}));
+function videoPixelFormatToEnum(format) {
+    switch (format) {
+        case 'I420': {
+            return VideoPixelFormatEnum.I420;
+        }
+        case 'NV12': {
+            return VideoPixelFormatEnum.Nv12;
+        }
+        case 'RGBA': {
+            return VideoPixelFormatEnum.Rgba;
+        }
+    }
+}
 class Call {
     constructor(callManager, remoteUserId, callId, isIncoming, isVideoCall, state) {
+        // Media state flags.
+        this._mediaSessionStarted = false;
         this._outgoingAudioEnabled = false;
         this._outgoingVideoEnabled = false;
         this._outgoingVideoIsScreenShare = false;
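videoPixelFormatToEnum, which previously lived in the deleted VideoSupport.js, maps a WebCodecs VideoPixelFormat string ('I420', 'NV12', 'RGBA') to the library's VideoPixelFormatEnum and returns undefined for anything else. A hedged sketch of using it when forwarding WebCodecs frames (the wrapper function is a local assumption; the 'I420' fallback mirrors what the removed GumVideoCapturer did):

```ts
import { videoPixelFormatToEnum, VideoPixelFormatEnum } from '@signalapp/ringrtc';

// WebCodecs reports the format as a string (or null); fall back to I420, then skip
// any format RingRTC does not understand.
function toRingRtcFormat(frame: VideoFrame): VideoPixelFormatEnum | undefined {
  return videoPixelFormatToEnum(frame.format ?? 'I420');
}
```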
@@ -1201,8 +1209,6 @@ class Call {
         this.remoteAudioLevel = 0;
         this.remoteSharingScreen = false;
         this.networkRoute = new NetworkRoute();
-        this._videoCapturer = null;
-        this._videoRenderer = null;
         this._callManager = callManager;
         this._remoteUserId = remoteUserId;
         this.callId = callId;
@@ -1228,11 +1234,39 @@ class Call {
         }
         this._state = state;
         if (state === CallState.Accepted) {
-            //
-            this.
+            // We might have been in the reconnecting state and already started media.
+            if (!this._mediaSessionStarted) {
+                sillyDeadlockProtection(() => {
+                    if (this._outgoingAudioEnabled) {
+                        this._callManager.setOutgoingAudioEnabled(true);
+                    }
+                    if (this._outgoingVideoIsScreenShare) {
+                        this._callManager.setOutgoingVideoIsScreenShare(true);
+                        this._callManager.setOutgoingVideoEnabled(true);
+                    }
+                    else if (this._outgoingVideoEnabled) {
+                        this._callManager.setOutgoingVideoEnabled(true);
+                    }
+                });
+                this._mediaSessionStarted = true;
+            }
+        }
+        else if (state === CallState.Ended) {
+            if (this._mediaSessionStarted) {
+                sillyDeadlockProtection(() => {
+                    if (this._outgoingAudioEnabled) {
+                        this._callManager.setOutgoingAudioEnabled(false);
+                    }
+                    if (this._outgoingVideoEnabled) {
+                        this._callManager.setOutgoingVideoEnabled(false);
+                    }
+                });
+                this._outgoingAudioEnabled = false;
+                this._outgoingVideoEnabled = false;
+                this._outgoingVideoIsScreenShare = false;
+                this._mediaSessionStarted = false;
+            }
         }
-        this.enableOrDisableCapturer();
-        this.enableOrDisableRenderer();
         if (this.handleStateChanged) {
             this.handleStateChanged();
         }
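The state setter now tracks a _mediaSessionStarted flag: outgoing audio/video choices are buffered until the call reaches Accepted, pushed to the native call manager once, and reset when the call ends. In practice that means the new mute setters can be called before the call connects and the latest values win when media actually starts. A hedged illustration of that calling pattern:

```ts
import { RingRTC } from '@signalapp/ringrtc';

// Configure outgoing media while the call is still ringing; nothing is sent to the
// native layer yet because the media session has not started.
const call = RingRTC.call;
if (call) {
  call.setOutgoingAudioMuted(false);
  call.setOutgoingVideoMuted(true);
  // Once the state flips to Accepted, the buffered flags above are applied in one
  // batch by the state setter shown in this hunk.
}
```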
@@ -1240,14 +1274,6 @@ class Call {
     setCallEnded() {
         this._state = CallState.Ended;
     }
-    set videoCapturer(capturer) {
-        this._videoCapturer = capturer;
-        this.enableOrDisableCapturer();
-    }
-    set videoRenderer(renderer) {
-        this._videoRenderer = renderer;
-        this.enableOrDisableRenderer();
-    }
     accept() {
         this._callManager.accept(this.callId);
     }
@@ -1258,43 +1284,10 @@ class Call {
         this._callManager.ignore(this.callId);
     }
     hangup() {
-        // This is a little faster than waiting for the
-        // change in call state to come back.
-        if (this._videoCapturer) {
-            this._videoCapturer.disable();
-        }
-        if (this._videoRenderer) {
-            this._videoRenderer.disable();
-        }
-        // This assumes we only have one active call.
         sillyDeadlockProtection(() => {
             this._callManager.hangup();
         });
     }
-    get outgoingAudioEnabled() {
-        return this._outgoingAudioEnabled;
-    }
-    set outgoingAudioEnabled(enabled) {
-        this._outgoingAudioEnabled = enabled;
-        // This assumes we only have one active call.
-        sillyDeadlockProtection(() => {
-            this._callManager.setOutgoingAudioEnabled(enabled);
-        });
-    }
-    get outgoingVideoEnabled() {
-        return this._outgoingVideoEnabled;
-    }
-    set outgoingVideoEnabled(enabled) {
-        this._outgoingVideoEnabled = enabled;
-        this.enableOrDisableCapturer();
-    }
-    set outgoingVideoIsScreenShare(isScreenShare) {
-        // This assumes we only have one active call.
-        this._outgoingVideoIsScreenShare = isScreenShare;
-        sillyDeadlockProtection(() => {
-            this._callManager.setOutgoingVideoIsScreenShare(isScreenShare);
-        });
-    }
     get remoteAudioEnabled() {
         return this._remoteAudioEnabled;
     }
@@ -1306,63 +1299,51 @@ class Call {
     }
     set remoteVideoEnabled(enabled) {
         this._remoteVideoEnabled = enabled;
-
+    }
+    setOutgoingAudioMuted(muted) {
+        const enabled = !muted;
+        if (this._mediaSessionStarted && this._outgoingAudioEnabled !== enabled) {
+            this._outgoingAudioEnabled = enabled;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingAudioEnabled(enabled);
+            });
+        }
+        else {
+            this._outgoingAudioEnabled = enabled;
+        }
+    }
+    setOutgoingVideoMuted(muted) {
+        const enabled = !muted;
+        if (this._mediaSessionStarted && this._outgoingVideoEnabled !== enabled) {
+            this._outgoingVideoEnabled = enabled;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingVideoEnabled(enabled);
+            });
+        }
+        else {
+            this._outgoingVideoEnabled = enabled;
+        }
+    }
+    setOutgoingVideoIsScreenShare(isScreenShare) {
+        if (this._mediaSessionStarted &&
+            this._outgoingVideoIsScreenShare !== isScreenShare) {
+            this._outgoingVideoIsScreenShare = isScreenShare;
+            sillyDeadlockProtection(() => {
+                this._callManager.setOutgoingVideoIsScreenShare(isScreenShare);
+            });
+        }
+        else {
+            this._outgoingVideoIsScreenShare = isScreenShare;
+        }
     }
     // With this method, a Call is a VideoFrameSender
     sendVideoFrame(width, height, format, buffer) {
-        // This assumes we only have one active call.
         this._callManager.sendVideoFrame(width, height, format, buffer);
     }
     // With this method, a Call is a VideoFrameSource
     receiveVideoFrame(buffer, maxWidth, maxHeight) {
-        // This assumes we only have one active call.
         return this._callManager.receiveVideoFrame(buffer, maxWidth, maxHeight);
     }
-    enableOrDisableCapturer() {
-        if (!this._videoCapturer) {
-            return;
-        }
-        if (!this.outgoingVideoEnabled) {
-            this._videoCapturer.disable();
-            if (this.state === CallState.Accepted) {
-                this.setOutgoingVideoEnabled(false);
-            }
-            return;
-        }
-        switch (this.state) {
-            case CallState.Prering:
-            case CallState.Ringing:
-                this._videoCapturer.enableCapture();
-                break;
-            case CallState.Accepted:
-                this._videoCapturer.enableCaptureAndSend(this);
-                this.setOutgoingVideoEnabled(true);
-                if (this._outgoingVideoIsScreenShare) {
-                    // Make sure the status gets sent.
-                    this.outgoingVideoIsScreenShare = true;
-                }
-                break;
-            case CallState.Reconnecting:
-                this._videoCapturer.enableCaptureAndSend(this);
-                // Don't send status until we're reconnected.
-                break;
-            case CallState.Ended:
-                this._videoCapturer.disable();
-                break;
-            default:
-        }
-    }
-    setOutgoingVideoEnabled(enabled) {
-        sillyDeadlockProtection(() => {
-            try {
-                this._callManager.setOutgoingVideoEnabled(enabled);
-            }
-            catch {
-                // We may not have an active connection any more.
-                // In which case it doesn't matter
-            }
-        });
-    }
     updateDataMode(dataMode) {
         sillyDeadlockProtection(() => {
             try {
@@ -1374,29 +1355,6 @@ class Call {
             }
         });
     }
-    enableOrDisableRenderer() {
-        if (!this._videoRenderer) {
-            return;
-        }
-        if (!this.remoteVideoEnabled) {
-            this._videoRenderer.disable();
-            return;
-        }
-        switch (this.state) {
-            case CallState.Prering:
-            case CallState.Ringing:
-                this._videoRenderer.disable();
-                break;
-            case CallState.Accepted:
-            case CallState.Reconnecting:
-                this._videoRenderer.enable(this);
-                break;
-            case CallState.Ended:
-                this._videoRenderer.disable();
-                break;
-            default:
-        }
-    }
 }
 exports.Call = Call;
 // Represents the connection state to a media server for a group call.
@@ -1585,6 +1543,16 @@ class GroupCall {
         this._observer.onLocalDeviceStateChanged(this);
     }
     // Called by UI
+    setOutgoingAudioMutedRemotely(source) {
+        this._localDeviceState.audioMuted = true;
+        this._callManager.setOutgoingAudioMutedRemotely(this._clientId, source);
+        this._observer.onLocalDeviceStateChanged(this);
+    }
+    // Called by UI
+    sendRemoteMuteRequest(target) {
+        this._callManager.sendRemoteMuteRequest(this._clientId, target);
+    }
+    // Called by UI
     react(value) {
         this._callManager.groupReact(this._clientId, value);
     }
@@ -1722,7 +1690,6 @@ class GroupCall {
     }
     // With this, a GroupCall is a VideoFrameSender
     sendVideoFrame(width, height, format, buffer) {
-        // This assumes we only have one active call.
         this._callManager.sendVideoFrame(width, height, format, buffer);
     }
     // With this, a GroupCall can provide a VideoFrameSource for each remote device.
@@ -1743,9 +1710,14 @@ class GroupCall {
     handleSpeechEvent(event) {
         this._observer.onSpeechEvent(this, event);
     }
+    onRemoteMute(demuxId) {
+        this._observer.onRemoteMute(this, demuxId);
+    }
+    onObservedRemoteMute(sourceDemuxId, targetDemuxId) {
+        this._observer.onObservedRemoteMute(this, sourceDemuxId, targetDemuxId);
+    }
 }
 exports.GroupCall = GroupCall;
-// Implements VideoSource for use in CanvasVideoRenderer
 class GroupCallVideoFrameSource {
     constructor(callManager, groupCall, remoteDemuxId // Uint32
     ) {
@@ -1754,7 +1726,6 @@ class GroupCallVideoFrameSource {
         this._remoteDemuxId = remoteDemuxId;
     }
     receiveVideoFrame(buffer, maxWidth, maxHeight) {
-        // This assumes we only have one active call.
         const frame = this._callManager.receiveGroupCallVideoFrame(this._groupCall.clientId, this._remoteDemuxId, buffer, maxWidth, maxHeight);
         if (frame) {
             const [width, height] = frame;
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@signalapp/ringrtc",
-  "version": "2.50.5",
+  "version": "2.52.0",
   "description": "Signal Messenger voice and video calling library.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -25,7 +25,7 @@
   },
   "config": {
     "prebuildUrl": "https://build-artifacts.signal.org/libraries/ringrtc-desktop-build-v${npm_package_version}.tar.gz",
-    "prebuildChecksum": "
+    "prebuildChecksum": "18d020c369ac1126df868d297b535518f6731c43d750c6bd3d9268044f23b855"
   },
   "author": "",
   "license": "AGPL-3.0-only",
@@ -36,11 +36,9 @@
   "devDependencies": {
     "@types/chai": "4.3.16",
     "@types/chai-as-promised": "^7.1.4",
-    "@types/dom-mediacapture-transform": "0.1.10",
     "@types/lodash": "^4.14.106",
     "@types/mocha": "10.0.9",
     "@types/node": "20.17.6",
-    "@types/offscreencanvas": "^2019.7.3",
     "@types/sinon-chai": "^3.2.12",
     "chai": "4.4.1",
     "chai-as-promised": "^7.1.1",
package/dist/ringrtc/VideoSupport.d.ts
REMOVED

@@ -1,77 +0,0 @@
-interface Ref<T> {
-    readonly current: T | null;
-}
-export declare enum VideoPixelFormatEnum {
-    I420 = 0,
-    Nv12 = 1,
-    Rgba = 2
-}
-export interface VideoFrameSource {
-    /**
-     * Copies the latest frame into `buffer`.
-     *
-     * Note that `maxWidth` and `maxHeight` specify maximum dimensions,
-     * but allow for rotation, i.e. a maximum of 1920x1080 will also allow
-     * portrait-mode 1080x1920.
-     *
-     * Returns a `[width, height]` pair for the resulting frame,
-     * or `undefined` if there's no new frame ready to be displayed.
-     */
-    receiveVideoFrame(buffer: Buffer, maxWidth: number, maxHeight: number): [number, number] | undefined;
-}
-interface VideoFrameSender {
-    sendVideoFrame(width: number, height: number, format: VideoPixelFormatEnum, buffer: Buffer): void;
-}
-export declare class GumVideoCaptureOptions {
-    maxWidth: number;
-    maxHeight: number;
-    maxFramerate: number;
-    preferredDeviceId?: string;
-    screenShareSourceId?: string;
-    mediaStream?: MediaStream;
-    onEnded?: () => void;
-}
-export declare class GumVideoCapturer {
-    private defaultCaptureOptions;
-    private localPreview?;
-    private captureOptions?;
-    private sender?;
-    private mediaStream?;
-    private spawnedSenderRunning;
-    private preferredDeviceId?;
-    private updateLocalPreviewIntervalId?;
-    constructor(defaultCaptureOptions: GumVideoCaptureOptions);
-    capturing(): boolean;
-    setLocalPreview(localPreview: Ref<HTMLVideoElement> | undefined): void;
-    enableCapture(options?: GumVideoCaptureOptions): Promise<void>;
-    enableCaptureAndSend(sender?: VideoFrameSender, options?: GumVideoCaptureOptions): Promise<void>;
-    disable(): void;
-    setPreferredDevice(deviceId: string): Promise<void>;
-    enumerateDevices(): Promise<Array<MediaDeviceInfo>>;
-    private getUserMedia;
-    private startCapturing;
-    private stopCapturing;
-    private startSending;
-    private spawnSender;
-    private stopSending;
-    private updateLocalPreviewSourceObject;
-}
-export declare const MAX_VIDEO_CAPTURE_WIDTH: number;
-export declare const MAX_VIDEO_CAPTURE_HEIGHT: number;
-export declare const MAX_VIDEO_CAPTURE_AREA: number;
-export declare const MAX_VIDEO_CAPTURE_BUFFER_SIZE: number;
-export declare class CanvasVideoRenderer {
-    private canvas?;
-    private buffer;
-    private imageData?;
-    private source?;
-    private rafId?;
-    constructor();
-    setCanvas(canvas: Ref<HTMLCanvasElement> | undefined): void;
-    enable(source: VideoFrameSource): void;
-    disable(): void;
-    private requestAnimationFrameCallback;
-    private renderBlack;
-    private renderVideoFrame;
-}
-export {};
package/dist/ringrtc/VideoSupport.js
REMOVED

@@ -1,433 +0,0 @@
-"use strict";
-//
-// Copyright 2019-2021 Signal Messenger, LLC
-// SPDX-License-Identifier: AGPL-3.0-only
-//
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CanvasVideoRenderer = exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE = exports.MAX_VIDEO_CAPTURE_AREA = exports.MAX_VIDEO_CAPTURE_HEIGHT = exports.MAX_VIDEO_CAPTURE_WIDTH = exports.GumVideoCapturer = exports.GumVideoCaptureOptions = exports.VideoPixelFormatEnum = void 0;
-const index_1 = require("../index");
-// Given a weird name to not conflict with WebCodec's VideoPixelFormat
-var VideoPixelFormatEnum;
-(function (VideoPixelFormatEnum) {
-    VideoPixelFormatEnum[VideoPixelFormatEnum["I420"] = 0] = "I420";
-    VideoPixelFormatEnum[VideoPixelFormatEnum["Nv12"] = 1] = "Nv12";
-    VideoPixelFormatEnum[VideoPixelFormatEnum["Rgba"] = 2] = "Rgba";
-})(VideoPixelFormatEnum || (exports.VideoPixelFormatEnum = VideoPixelFormatEnum = {}));
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function videoPixelFormatFromEnum(format) {
-    switch (format) {
-        case VideoPixelFormatEnum.I420: {
-            return 'I420';
-        }
-        case VideoPixelFormatEnum.Nv12: {
-            return 'NV12';
-        }
-        case VideoPixelFormatEnum.Rgba: {
-            return 'RGBA';
-        }
-    }
-}
-function videoPixelFormatToEnum(format) {
-    switch (format) {
-        case 'I420': {
-            return VideoPixelFormatEnum.I420;
-        }
-        case 'NV12': {
-            return VideoPixelFormatEnum.Nv12;
-        }
-        case 'RGBA': {
-            return VideoPixelFormatEnum.Rgba;
-        }
-    }
-}
-class GumVideoCaptureOptions {
-    constructor() {
-        this.maxWidth = 640;
-        this.maxHeight = 480;
-        this.maxFramerate = 30;
-    }
-}
-exports.GumVideoCaptureOptions = GumVideoCaptureOptions;
-class GumVideoCapturer {
-    constructor(defaultCaptureOptions) {
-        this.spawnedSenderRunning = false;
-        this.defaultCaptureOptions = defaultCaptureOptions;
-    }
-    capturing() {
-        return this.captureOptions != undefined;
-    }
-    setLocalPreview(localPreview) {
-        const oldLocalPreview = this.localPreview?.current;
-        if (oldLocalPreview) {
-            oldLocalPreview.srcObject = null;
-        }
-        this.localPreview = localPreview;
-        this.updateLocalPreviewSourceObject();
-        // This is a dumb hack around the fact that sometimes the
-        // this.localPreview.current is updated without a call
-        // to setLocalPreview, in which case the local preview
-        // won't be rendered.
-        if (this.updateLocalPreviewIntervalId != undefined) {
-            clearInterval(this.updateLocalPreviewIntervalId);
-        }
-        this.updateLocalPreviewIntervalId = setInterval(this.updateLocalPreviewSourceObject.bind(this), 1000);
-    }
-    async enableCapture(options) {
-        return this.startCapturing(options ?? this.defaultCaptureOptions);
-    }
-    async enableCaptureAndSend(sender, options) {
-        const startCapturingPromise = this.startCapturing(options ?? this.defaultCaptureOptions);
-        if (sender) {
-            this.startSending(sender);
-        }
-        // Bubble up the error.
-        return startCapturingPromise;
-    }
-    disable() {
-        this.stopCapturing();
-        this.stopSending();
-        if (this.updateLocalPreviewIntervalId != undefined) {
-            clearInterval(this.updateLocalPreviewIntervalId);
-        }
-        this.updateLocalPreviewIntervalId = undefined;
-    }
-    // eslint-disable-next-line @typescript-eslint/require-await
-    async setPreferredDevice(deviceId) {
-        this.preferredDeviceId = deviceId;
-        if (this.captureOptions) {
-            const { captureOptions, sender } = this;
-            this.disable();
-            // Bubble up the error if starting video failed.
-            return this.enableCaptureAndSend(sender, captureOptions);
-        }
-    }
-    async enumerateDevices() {
-        const devices = await window.navigator.mediaDevices.enumerateDevices();
-        const cameras = devices.filter(d => d.kind == 'videoinput');
-        return cameras;
-    }
-    async getUserMedia(options) {
-        // Return provided media stream
-        if (options.mediaStream) {
-            return options.mediaStream;
-        }
-        const constraints = {
-            audio: false,
-            video: {
-                deviceId: options.preferredDeviceId ?? this.preferredDeviceId,
-                width: {
-                    max: options.maxWidth,
-                    ideal: options.maxWidth,
-                },
-                height: {
-                    max: options.maxHeight,
-                    ideal: options.maxHeight,
-                },
-                frameRate: {
-                    max: options.maxFramerate,
-                    ideal: options.maxFramerate,
-                },
-            },
-        };
-        if (options.screenShareSourceId != undefined) {
-            constraints.video = {
-                mandatory: {
-                    chromeMediaSource: 'desktop',
-                    chromeMediaSourceId: options.screenShareSourceId,
-                    maxWidth: options.maxWidth,
-                    maxHeight: options.maxHeight,
-                    minFrameRate: 1,
-                    maxFrameRate: options.maxFramerate,
-                },
-            };
-        }
-        return window.navigator.mediaDevices.getUserMedia(constraints);
-    }
-    async startCapturing(options) {
-        if (this.capturing()) {
-            index_1.RingRTC.logWarn('startCapturing(): already capturing');
-            return;
-        }
-        index_1.RingRTC.logInfo(`startCapturing(): ${options.maxWidth}x${options.maxHeight}@${options.maxFramerate}`);
-        this.captureOptions = options;
-        try {
-            // If we start/stop/start, we may have concurrent calls to getUserMedia,
-            // which is what we want if we're switching from camera to screenshare.
-            // But we need to make sure we deal with the fact that things might be
-            // different after the await here.
-            const mediaStream = await this.getUserMedia(options);
-            // It's possible video was disabled, switched to screenshare, or
-            // switched to a different camera while awaiting a response, in
-            // which case we need to disable the camera we just accessed.
-            if (this.captureOptions != options) {
-                index_1.RingRTC.logWarn('startCapturing(): different state after getUserMedia()');
-                for (const track of mediaStream.getVideoTracks()) {
-                    // Make the light turn off faster
-                    track.stop();
-                }
-                return;
-            }
-            if (this.mediaStream !== undefined &&
-                this.mediaStream.getVideoTracks().length > 0) {
-                // We have a stream and track for the requested camera already. Stop
-                // the duplicate track that we just started.
-                index_1.RingRTC.logWarn('startCapturing(): dropping duplicate call to startCapturing');
-                for (const track of mediaStream.getVideoTracks()) {
-                    track.stop();
-                }
-                return;
-            }
-            this.mediaStream = mediaStream;
-            if (!this.spawnedSenderRunning &&
-                this.mediaStream != undefined &&
-                this.sender != undefined) {
-                this.spawnSender(this.mediaStream, this.sender);
-            }
-            this.updateLocalPreviewSourceObject();
-        }
-        catch (e) {
-            index_1.RingRTC.logError(`startCapturing(): ${e}`);
-            // It's possible video was disabled, switched to screenshare, or
-            // switched to a different camera while awaiting a response, in
-            // which case we should reset the captureOptions if we set them.
-            if (this.captureOptions == options) {
-                // We couldn't open the camera. Oh well.
-                this.captureOptions = undefined;
-            }
-            // Re-raise so that callers can surface this condition to the user.
-            throw e;
-        }
-    }
-    stopCapturing() {
-        if (!this.capturing()) {
-            index_1.RingRTC.logWarn('stopCapturing(): not capturing');
-            return;
-        }
-        index_1.RingRTC.logInfo('stopCapturing()');
-        this.captureOptions = undefined;
-        if (this.mediaStream) {
-            for (const track of this.mediaStream.getVideoTracks()) {
-                // Make the light turn off faster
-                track.stop();
-            }
-            this.mediaStream = undefined;
-        }
-        this.updateLocalPreviewSourceObject();
-    }
-    startSending(sender) {
-        if (this.sender === sender) {
-            return;
-        }
-        if (this.sender) {
-            // If we're replacing an existing sender, make sure we stop the
-            // current setInterval loop before starting another one.
-            this.stopSending();
-        }
-        this.sender = sender;
-        if (!this.spawnedSenderRunning && this.mediaStream != undefined) {
-            this.spawnSender(this.mediaStream, this.sender);
-        }
-    }
-    spawnSender(mediaStream, sender) {
-        const track = mediaStream.getVideoTracks()[0];
-        if (track == undefined || this.spawnedSenderRunning) {
-            return;
-        }
-        const captureOptions = this.captureOptions;
-        if (track.readyState === 'ended') {
-            this.stopCapturing();
-            index_1.RingRTC.logError('spawnSender(): Video track ended before spawning sender');
-            return;
-        }
-        const reader = new MediaStreamTrackProcessor({
-            track,
-        }).readable.getReader();
-        const buffer = Buffer.alloc(exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE);
-        this.spawnedSenderRunning = true;
-        // eslint-disable-next-line @typescript-eslint/no-floating-promises
-        (async () => {
-            try {
-                while (mediaStream == this.mediaStream) {
-                    const { done, value: frame } = await reader.read();
-                    if (done) {
-                        break;
-                    }
-                    if (!frame) {
-                        continue;
-                    }
-                    try {
-                        const format = videoPixelFormatToEnum(frame.format ?? 'I420');
-                        if (format == undefined) {
-                            index_1.RingRTC.logWarn(`Unsupported video frame format: ${frame.format}`);
-                            break;
-                        }
-                        const visibleRect = frame.visibleRect;
-                        if (!visibleRect) {
-                            continue;
-                        }
-                        await frame.copyTo(buffer);
-                        if (sender !== this.sender) {
-                            break;
-                        }
-                        sender.sendVideoFrame(visibleRect.width, visibleRect.height, format, buffer);
-                    }
-                    catch (e) {
-                        index_1.RingRTC.logError(`sendVideoFrame(): ${e}`);
-                    }
-                    finally {
-                        // This must be called for more frames to come.
-                        frame.close();
-                    }
-                }
-            }
-            catch (e) {
-                index_1.RingRTC.logError(`spawnSender(): ${e}`);
-            }
-            finally {
-                reader.releaseLock();
-                captureOptions?.onEnded?.();
-            }
-            this.spawnedSenderRunning = false;
-        })();
-    }
-    stopSending() {
-        // The spawned sender should stop
-        this.sender = undefined;
-    }
-    updateLocalPreviewSourceObject() {
-        if (!this.localPreview) {
-            return;
-        }
-        const localPreview = this.localPreview.current;
-        if (!localPreview) {
-            return;
-        }
-        const { mediaStream = null } = this;
-        if (localPreview.srcObject === mediaStream) {
-            return;
-        }
-        if (mediaStream && this.captureOptions) {
-            localPreview.srcObject = mediaStream;
-            if (localPreview.width === 0) {
-                localPreview.width = this.captureOptions.maxWidth;
-            }
-            if (localPreview.height === 0) {
-                localPreview.height = this.captureOptions.maxHeight;
-            }
-        }
-        else {
-            localPreview.srcObject = null;
-        }
-    }
-}
-exports.GumVideoCapturer = GumVideoCapturer;
-// We add 10% in each dimension to allow for things that are slightly wider or taller than 1080p.
-const MAX_VIDEO_CAPTURE_MULTIPLIER = 1.0;
-exports.MAX_VIDEO_CAPTURE_WIDTH = 2880 * MAX_VIDEO_CAPTURE_MULTIPLIER;
-exports.MAX_VIDEO_CAPTURE_HEIGHT = 1800 * MAX_VIDEO_CAPTURE_MULTIPLIER;
-exports.MAX_VIDEO_CAPTURE_AREA = exports.MAX_VIDEO_CAPTURE_WIDTH * exports.MAX_VIDEO_CAPTURE_HEIGHT;
-exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE = exports.MAX_VIDEO_CAPTURE_AREA * 4;
-class CanvasVideoRenderer {
-    constructor() {
-        this.buffer = Buffer.alloc(exports.MAX_VIDEO_CAPTURE_BUFFER_SIZE);
-    }
-    setCanvas(canvas) {
-        this.canvas = canvas;
-    }
-    enable(source) {
-        if (this.source === source) {
-            return;
-        }
-        if (this.source) {
-            // If we're replacing an existing source, make sure we stop the
-            // current rAF loop before starting another one.
-            if (this.rafId) {
-                window.cancelAnimationFrame(this.rafId);
-            }
-        }
-        this.source = source;
-        this.requestAnimationFrameCallback();
-    }
-    disable() {
-        this.renderBlack();
-        this.source = undefined;
-        if (this.rafId) {
-            window.cancelAnimationFrame(this.rafId);
-        }
-    }
-    requestAnimationFrameCallback() {
-        this.renderVideoFrame();
-        this.rafId = window.requestAnimationFrame(this.requestAnimationFrameCallback.bind(this));
-    }
-    renderBlack() {
-        if (!this.canvas) {
-            return;
-        }
-        const canvas = this.canvas.current;
-        if (!canvas) {
-            return;
-        }
-        const context = canvas.getContext('2d');
-        if (!context) {
-            return;
-        }
-        context.fillStyle = 'black';
-        context.fillRect(0, 0, canvas.width, canvas.height);
-    }
-    renderVideoFrame() {
-        if (!this.source || !this.canvas) {
-            return;
-        }
-        const canvas = this.canvas.current;
-        if (!canvas) {
-            return;
-        }
-        const context = canvas.getContext('2d');
-        if (!context) {
-            return;
-        }
-        const frame = this.source.receiveVideoFrame(this.buffer, exports.MAX_VIDEO_CAPTURE_WIDTH, exports.MAX_VIDEO_CAPTURE_HEIGHT);
-        if (!frame) {
-            return;
-        }
-        const [width, height] = frame;
-        if (canvas.clientWidth <= 0 ||
-            width <= 0 ||
-            canvas.clientHeight <= 0 ||
-            height <= 0) {
-            return;
-        }
-        const frameAspectRatio = width / height;
-        const canvasAspectRatio = canvas.clientWidth / canvas.clientHeight;
-        let dx = 0;
-        let dy = 0;
-        if (frameAspectRatio > canvasAspectRatio) {
-            // Frame wider than view: We need bars at the top and bottom
-            canvas.width = width;
-            canvas.height = width / canvasAspectRatio;
-            dy = (canvas.height - height) / 2;
-        }
-        else if (frameAspectRatio < canvasAspectRatio) {
-            // Frame narrower than view: We need pillars on the sides
-            canvas.width = height * canvasAspectRatio;
-            canvas.height = height;
-            dx = (canvas.width - width) / 2;
-        }
-        else {
-            // Will stretch perfectly with no bars
-            canvas.width = width;
-            canvas.height = height;
-        }
-        if (dx > 0 || dy > 0) {
-            context.fillStyle = 'black';
-            context.fillRect(0, 0, canvas.width, canvas.height);
-        }
-        if (this.imageData?.width !== width || this.imageData?.height !== height) {
-            this.imageData = new ImageData(width, height);
-        }
-        this.imageData.data.set(this.buffer.subarray(0, width * height * 4));
-        context.putImageData(this.imageData, dx, dy);
-    }
-}
-exports.CanvasVideoRenderer = CanvasVideoRenderer;
-//# sourceMappingURL=VideoSupport.js.map