livekit-client 2.15.5 → 2.15.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/livekit-client.e2ee.worker.js +1 -1
  2. package/dist/livekit-client.e2ee.worker.js.map +1 -1
  3. package/dist/livekit-client.e2ee.worker.mjs +54 -50
  4. package/dist/livekit-client.e2ee.worker.mjs.map +1 -1
  5. package/dist/livekit-client.esm.mjs +81 -40
  6. package/dist/livekit-client.esm.mjs.map +1 -1
  7. package/dist/livekit-client.umd.js +1 -1
  8. package/dist/livekit-client.umd.js.map +1 -1
  9. package/dist/src/e2ee/E2eeManager.d.ts.map +1 -1
  10. package/dist/src/e2ee/worker/FrameCryptor.d.ts +0 -1
  11. package/dist/src/e2ee/worker/FrameCryptor.d.ts.map +1 -1
  12. package/dist/src/e2ee/worker/sifPayload.d.ts +22 -0
  13. package/dist/src/e2ee/worker/sifPayload.d.ts.map +1 -0
  14. package/dist/src/room/PCTransport.d.ts.map +1 -1
  15. package/dist/src/room/Room.d.ts.map +1 -1
  16. package/dist/src/room/participant/LocalParticipant.d.ts +1 -3
  17. package/dist/src/room/participant/LocalParticipant.d.ts.map +1 -1
  18. package/dist/src/room/track/LocalTrack.d.ts.map +1 -1
  19. package/dist/src/room/track/RemoteVideoTrack.d.ts +1 -0
  20. package/dist/src/room/track/RemoteVideoTrack.d.ts.map +1 -1
  21. package/dist/src/room/track/Track.d.ts +4 -1
  22. package/dist/src/room/track/Track.d.ts.map +1 -1
  23. package/dist/src/room/utils.d.ts +8 -0
  24. package/dist/src/room/utils.d.ts.map +1 -1
  25. package/dist/ts4.2/src/e2ee/worker/FrameCryptor.d.ts +0 -1
  26. package/dist/ts4.2/src/e2ee/worker/sifPayload.d.ts +22 -0
  27. package/dist/ts4.2/src/room/participant/LocalParticipant.d.ts +1 -3
  28. package/dist/ts4.2/src/room/track/RemoteVideoTrack.d.ts +1 -0
  29. package/dist/ts4.2/src/room/track/Track.d.ts +4 -1
  30. package/dist/ts4.2/src/room/utils.d.ts +8 -0
  31. package/package.json +10 -9
  32. package/src/e2ee/E2eeManager.ts +18 -1
  33. package/src/e2ee/worker/FrameCryptor.ts +8 -18
  34. package/src/e2ee/worker/e2ee.worker.ts +6 -1
  35. package/src/e2ee/worker/sifPayload.ts +75 -0
  36. package/src/room/PCTransport.ts +14 -5
  37. package/src/room/Room.ts +12 -3
  38. package/src/room/participant/LocalParticipant.ts +9 -23
  39. package/src/room/track/LocalTrack.ts +5 -2
  40. package/src/room/track/RemoteVideoTrack.ts +12 -2
  41. package/src/room/track/Track.ts +10 -1
  42. package/src/room/utils.ts +12 -3
  43. package/dist/src/e2ee/worker/SifGuard.d.ts +0 -11
  44. package/dist/src/e2ee/worker/SifGuard.d.ts.map +0 -1
  45. package/dist/ts4.2/src/e2ee/worker/SifGuard.d.ts +0 -11
  46. package/src/e2ee/worker/SifGuard.ts +0 -47
@@ -0,0 +1,75 @@
1
+ import type { VideoCodec } from '../..';
2
+
3
+ // Payload definitions taken from https://github.com/livekit/livekit/blob/master/pkg/sfu/downtrack.go#L104
4
+
5
+ export const VP8KeyFrame8x8 = new Uint8Array([
6
+ 0x10, 0x02, 0x00, 0x9d, 0x01, 0x2a, 0x08, 0x00, 0x08, 0x00, 0x00, 0x47, 0x08, 0x85, 0x85, 0x88,
7
+ 0x85, 0x84, 0x88, 0x02, 0x02, 0x00, 0x0c, 0x0d, 0x60, 0x00, 0xfe, 0xff, 0xab, 0x50, 0x80,
8
+ ]);
9
+
10
+ export const H264KeyFrame2x2SPS = new Uint8Array([
11
+ 0x67, 0x42, 0xc0, 0x1f, 0x0f, 0xd9, 0x1f, 0x88, 0x88, 0x84, 0x00, 0x00, 0x03, 0x00, 0x04, 0x00,
12
+ 0x00, 0x03, 0x00, 0xc8, 0x3c, 0x60, 0xc9, 0x20,
13
+ ]);
14
+
15
+ export const H264KeyFrame2x2PPS = new Uint8Array([0x68, 0x87, 0xcb, 0x83, 0xcb, 0x20]);
16
+
17
+ export const H264KeyFrame2x2IDR = new Uint8Array([
18
+ 0x65, 0x88, 0x84, 0x0a, 0xf2, 0x62, 0x80, 0x00, 0xa7, 0xbe,
19
+ ]);
20
+
21
+ export const H264KeyFrame2x2 = [H264KeyFrame2x2SPS, H264KeyFrame2x2PPS, H264KeyFrame2x2IDR];
22
+
23
+ export const OpusSilenceFrame = new Uint8Array([
24
+ 0xf8, 0xff, 0xfe, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
25
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
26
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
27
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
28
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
29
+ ]);
30
+
31
+ /**
32
+ * Create a crypto hash using Web Crypto API for secure comparison operations
33
+ */
34
+ async function cryptoHash(data: Uint8Array | ArrayBuffer): Promise<string> {
35
+ const hashBuffer = await crypto.subtle.digest('SHA-256', data);
36
+ const hashArray = new Uint8Array(hashBuffer);
37
+ return Array.from(hashArray)
38
+ .map((b) => b.toString(16).padStart(2, '0'))
39
+ .join('');
40
+ }
41
+
42
+ /**
43
+ * Pre-computed SHA-256 hashes for secure comparison operations
44
+ */
45
+ export const CryptoHashes = {
46
+ VP8KeyFrame8x8: 'ef0161653d8b2b23aad46624b420af1d03ce48950e9fc85718028f91b50f9219',
47
+ H264KeyFrame2x2SPS: 'f0a0e09647d891d6d50aa898bce7108090375d0d55e50a2bb21147afee558e44',
48
+ H264KeyFrame2x2PPS: '61d9665eed71b6d424ae9539330a3bdd5cb386d4d781c808219a6e36750493a7',
49
+ H264KeyFrame2x2IDR: 'faffc26b68a2fc09096fa20f3351e706398b6f838a7500c8063472c2e476e90d',
50
+ OpusSilenceFrame: 'aad8d31fc56b2802ca500e58c2fb9d0b29ad71bb7cb52cd6530251eade188988',
51
+ } as const;
52
+
53
+ /**
54
+ * Check if a byte array matches any of the known SIF payload frame types using secure crypto hashes
55
+ */
56
+ export async function identifySifPayload(
57
+ data: Uint8Array | ArrayBuffer,
58
+ ): Promise<VideoCodec | 'opus' | null> {
59
+ const hash = await cryptoHash(data);
60
+
61
+ switch (hash) {
62
+ case CryptoHashes.VP8KeyFrame8x8:
63
+ return 'vp8';
64
+ case CryptoHashes.H264KeyFrame2x2SPS:
65
+ return 'h264';
66
+ case CryptoHashes.H264KeyFrame2x2PPS:
67
+ return 'h264';
68
+ case CryptoHashes.H264KeyFrame2x2IDR:
69
+ return 'h264';
70
+ case CryptoHashes.OpusSilenceFrame:
71
+ return 'opus';
72
+ default:
73
+ return null;
74
+ }
75
+ }
@@ -165,10 +165,11 @@ export default class PCTransport extends EventEmitter {
165
165
  } else if (sd.type === 'answer') {
166
166
  const sdpParsed = parse(sd.sdp ?? '');
167
167
  sdpParsed.media.forEach((media) => {
168
+ const mid = getMidString(media.mid!);
168
169
  if (media.type === 'audio') {
169
170
  // mung sdp for opus bitrate settings
170
171
  this.trackBitrates.some((trackbr): boolean => {
171
- if (!trackbr.transceiver || media.mid != trackbr.transceiver.mid) {
172
+ if (!trackbr.transceiver || mid != trackbr.transceiver.mid) {
172
173
  return false;
173
174
  }
174
175
 
@@ -593,6 +594,9 @@ function ensureAudioNackAndStereo(
593
594
  stereoMids: string[],
594
595
  nackMids: string[],
595
596
  ) {
597
+ // sdp-transform types don't include number however the parser outputs mids as numbers in some cases
598
+ const mid = getMidString(media.mid!);
599
+
596
600
  // found opus codec to add nack fb
597
601
  let opusPayload = 0;
598
602
  media.rtp.some((rtp): boolean => {
@@ -610,7 +614,7 @@ function ensureAudioNackAndStereo(
610
614
  }
611
615
 
612
616
  if (
613
- nackMids.includes(media.mid!) &&
617
+ nackMids.includes(mid) &&
614
618
  !media.rtcpFb.some((fb) => fb.payload === opusPayload && fb.type === 'nack')
615
619
  ) {
616
620
  media.rtcpFb.push({
@@ -619,7 +623,7 @@ function ensureAudioNackAndStereo(
619
623
  });
620
624
  }
621
625
 
622
- if (stereoMids.includes(media.mid!)) {
626
+ if (stereoMids.includes(mid)) {
623
627
  media.fmtp.some((fmtp): boolean => {
624
628
  if (fmtp.payload === opusPayload) {
625
629
  if (!fmtp.config.includes('stereo=1')) {
@@ -642,6 +646,7 @@ function extractStereoAndNackAudioFromOffer(offer: RTCSessionDescriptionInit): {
642
646
  const sdpParsed = parse(offer.sdp ?? '');
643
647
  let opusPayload = 0;
644
648
  sdpParsed.media.forEach((media) => {
649
+ const mid = getMidString(media.mid!);
645
650
  if (media.type === 'audio') {
646
651
  media.rtp.some((rtp): boolean => {
647
652
  if (rtp.codec === 'opus') {
@@ -652,13 +657,13 @@ function extractStereoAndNackAudioFromOffer(offer: RTCSessionDescriptionInit): {
652
657
  });
653
658
 
654
659
  if (media.rtcpFb?.some((fb) => fb.payload === opusPayload && fb.type === 'nack')) {
655
- nackMids.push(media.mid!);
660
+ nackMids.push(mid);
656
661
  }
657
662
 
658
663
  media.fmtp.some((fmtp): boolean => {
659
664
  if (fmtp.payload === opusPayload) {
660
665
  if (fmtp.config.includes('sprop-stereo=1')) {
661
- stereoMids.push(media.mid!);
666
+ stereoMids.push(mid);
662
667
  }
663
668
  return true;
664
669
  }
@@ -682,3 +687,7 @@ function ensureIPAddrMatchVersion(media: MediaDescription) {
682
687
  }
683
688
  }
684
689
  }
690
+
691
+ function getMidString(mid: string | number) {
692
+ return typeof mid === 'number' ? mid.toFixed(0) : mid;
693
+ }
package/src/room/Room.ts CHANGED
@@ -970,7 +970,7 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
970
970
  * @internal for testing
971
971
  */
972
972
  async simulateScenario(scenario: SimulationScenario, arg?: any) {
973
- let postAction = () => {};
973
+ let postAction = async () => {};
974
974
  let req: SimulateScenario | undefined;
975
975
  switch (scenario) {
976
976
  case 'signal-reconnect':
@@ -1409,13 +1409,22 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
1409
1409
  }
1410
1410
  }
1411
1411
 
1412
- participant.addSubscribedMediaTrack(
1412
+ const publication = participant.addSubscribedMediaTrack(
1413
1413
  mediaTrack,
1414
1414
  trackId,
1415
1415
  stream,
1416
1416
  receiver,
1417
1417
  adaptiveStreamSettings,
1418
1418
  );
1419
+
1420
+ if (publication?.isEncrypted && !this.e2eeManager) {
1421
+ this.emit(
1422
+ RoomEvent.EncryptionError,
1423
+ new Error(
1424
+ `Encrypted ${publication.source} track received from participant ${participant.sid}, but room does not have encryption enabled!`,
1425
+ ),
1426
+ );
1427
+ }
1419
1428
  }
1420
1429
 
1421
1430
  private handleRestarting = () => {
@@ -1661,8 +1670,8 @@ class Room extends (EventEmitter as new () => TypedEmitter<RoomEventCallbacks>)
1661
1670
  return;
1662
1671
  }
1663
1672
  const newStreamState = Track.streamStateFromProto(streamState.state);
1673
+ pub.track.setStreamState(newStreamState);
1664
1674
  if (newStreamState !== pub.track.streamState) {
1665
- pub.track.streamState = newStreamState;
1666
1675
  participant.emit(ParticipantEvent.TrackStreamStateChanged, pub, pub.track.streamState);
1667
1676
  this.emitWhenConnected(
1668
1677
  RoomEvent.TrackStreamStateChanged,
@@ -9,7 +9,6 @@ import {
9
9
  Encryption_Type,
10
10
  JoinResponse,
11
11
  ParticipantInfo,
12
- ParticipantPermission,
13
12
  RequestResponse,
14
13
  RequestResponse_Reason,
15
14
  RpcAck,
@@ -453,16 +452,6 @@ export default class LocalParticipant extends Participant {
453
452
  return this.setTrackEnabled(Track.Source.ScreenShare, enabled, options, publishOptions);
454
453
  }
455
454
 
456
- /** @internal */
457
- setPermissions(permissions: ParticipantPermission): boolean {
458
- const prevPermissions = this.permissions;
459
- const changed = super.setPermissions(permissions);
460
- if (changed && prevPermissions) {
461
- this.emit(ParticipantEvent.ParticipantPermissionsChanged, prevPermissions);
462
- }
463
- return changed;
464
- }
465
-
466
455
  /** @internal */
467
456
  async setE2EEEnabled(enabled: boolean) {
468
457
  this.encryptionType = enabled ? Encryption_Type.GCM : Encryption_Type.NONE;
@@ -841,19 +830,20 @@ export default class LocalParticipant extends Participant {
841
830
  return existingPublication;
842
831
  }
843
832
 
833
+ const opts: TrackPublishOptions = {
834
+ ...this.roomOptions.publishDefaults,
835
+ ...options,
836
+ };
844
837
  const isStereoInput =
845
838
  ('channelCount' in track.mediaStreamTrack.getSettings() &&
846
839
  // @ts-ignore `channelCount` on getSettings() is currently only available for Safari, but is generally the best way to determine a stereo track https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/channelCount
847
840
  track.mediaStreamTrack.getSettings().channelCount === 2) ||
848
841
  track.mediaStreamTrack.getConstraints().channelCount === 2;
849
- const isStereo = options?.forceStereo ?? isStereoInput;
842
+ const isStereo = opts.forceStereo ?? isStereoInput;
850
843
 
851
844
  // disable dtx for stereo track if not enabled explicitly
852
845
  if (isStereo) {
853
- if (!options) {
854
- options = {};
855
- }
856
- if (options.dtx === undefined) {
846
+ if (opts.dtx === undefined) {
857
847
  this.log.info(
858
848
  `Opus DTX will be disabled for stereo tracks by default. Enable them explicitly to make it work.`,
859
849
  {
@@ -862,18 +852,14 @@ export default class LocalParticipant extends Participant {
862
852
  },
863
853
  );
864
854
  }
865
- if (options.red === undefined) {
855
+ if (opts.red === undefined) {
866
856
  this.log.info(
867
857
  `Opus RED will be disabled for stereo tracks by default. Enable them explicitly to make it work.`,
868
858
  );
869
859
  }
870
- options.dtx ??= false;
871
- options.red ??= false;
860
+ opts.dtx ??= false;
861
+ opts.red ??= false;
872
862
  }
873
- const opts: TrackPublishOptions = {
874
- ...this.roomOptions.publishDefaults,
875
- ...options,
876
- };
877
863
 
878
864
  if (!isE2EESimulcastSupported() && this.roomOptions.e2ee) {
879
865
  this.log.info(
@@ -339,7 +339,7 @@ export default abstract class LocalTrack<
339
339
  if (this.kind === Track.Kind.Video) {
340
340
  streamConstraints.video = deviceId || facingMode ? { deviceId, facingMode } : true;
341
341
  } else {
342
- streamConstraints.audio = deviceId ? { deviceId } : true;
342
+ streamConstraints.audio = deviceId ? { deviceId, ...otherConstraints } : true;
343
343
  }
344
344
 
345
345
  // these steps are duplicated from setMediaStreamTrack because we must stop
@@ -356,7 +356,10 @@ export default abstract class LocalTrack<
356
356
  // create new track and attach
357
357
  const mediaStream = await navigator.mediaDevices.getUserMedia(streamConstraints);
358
358
  const newTrack = mediaStream.getTracks()[0];
359
- await newTrack.applyConstraints(otherConstraints);
359
+ if (this.kind === Track.Kind.Video) {
360
+ // we already captured the audio track with the constraints, so we only need to apply the video constraints
361
+ await newTrack.applyConstraints(otherConstraints);
362
+ }
360
363
  newTrack.addEventListener('ended', this.handleEnded);
361
364
  this.log.debug('re-acquired MediaStreamTrack', this.logContext);
362
365
 
@@ -38,6 +38,16 @@ export default class RemoteVideoTrack extends RemoteTrack<Track.Kind.Video> {
38
38
  return this.adaptiveStreamSettings !== undefined;
39
39
  }
40
40
 
41
+ override setStreamState(value: Track.StreamState) {
42
+ super.setStreamState(value);
43
+ console.log('setStreamState', value);
44
+ if (value === Track.StreamState.Active) {
45
+ // update visibility for adaptive stream tracks when stream state received from server is active
46
+ // this is needed to ensure the track is stopped when there's no element attached to it at all
47
+ this.updateVisibility();
48
+ }
49
+ }
50
+
41
51
  /**
42
52
  * Note: When using adaptiveStream, you need to use remoteVideoTrack.attach() to add the track to a HTMLVideoElement, otherwise your video tracks might never start
43
53
  */
@@ -220,7 +230,7 @@ export default class RemoteVideoTrack extends RemoteTrack<Track.Kind.Video> {
220
230
  this.updateDimensions();
221
231
  }, REACTION_DELAY);
222
232
 
223
- private updateVisibility() {
233
+ private updateVisibility(forceEmit?: boolean) {
224
234
  const lastVisibilityChange = this.elementInfos.reduce(
225
235
  (prev, info) => Math.max(prev, info.visibilityChangedAt || 0),
226
236
  0,
@@ -234,7 +244,7 @@ export default class RemoteVideoTrack extends RemoteTrack<Track.Kind.Video> {
234
244
  const isVisible =
235
245
  (this.elementInfos.some((info) => info.visible) && !backgroundPause) || isPiPMode;
236
246
 
237
- if (this.lastVisible === isVisible) {
247
+ if (this.lastVisible === isVisible && !forceEmit) {
238
248
  return;
239
249
  }
240
250
 
@@ -37,6 +37,8 @@ export abstract class Track<
37
37
 
38
38
  source: Track.Source;
39
39
 
40
+ private _streamState: Track.StreamState = Track.StreamState.Active;
41
+
40
42
  /**
41
43
  * sid is set after track is published to server, or if it's a remote track
42
44
  */
@@ -51,7 +53,14 @@ export abstract class Track<
51
53
  * indicates current state of stream, it'll indicate `paused` if the track
52
54
  * has been paused by congestion controller
53
55
  */
54
- streamState: Track.StreamState = Track.StreamState.Active;
56
+ get streamState(): Track.StreamState {
57
+ return this._streamState;
58
+ }
59
+
60
+ /** @internal */
61
+ setStreamState(value: Track.StreamState) {
62
+ this._streamState = value;
63
+ }
55
64
 
56
65
  /** @internal */
57
66
  rtpTimestamp: number | undefined;
package/src/room/utils.ts CHANGED
@@ -64,8 +64,9 @@ export function supportsAV1(): boolean {
64
64
  if (!('getCapabilities' in RTCRtpSender)) {
65
65
  return false;
66
66
  }
67
- if (isSafari()) {
67
+ if (isSafari() || isFireFox()) {
68
68
  // Safari 17 on iPhone14 reports AV1 capability, but does not actually support it
69
+ // Firefox does support AV1, but SVC publishing is not supported
69
70
  return false;
70
71
  }
71
72
  const capabilities = RTCRtpSender.getCapabilities('video');
@@ -214,12 +215,12 @@ export function isE2EESimulcastSupported() {
214
215
  } else if (
215
216
  browser.os === 'iOS' &&
216
217
  browser.osVersion &&
217
- compareVersions(supportedSafariVersion, browser.osVersion) >= 0
218
+ compareVersions(browser.osVersion, supportedSafariVersion) >= 0
218
219
  ) {
219
220
  return true;
220
221
  } else if (
221
222
  browser.name === 'Safari' &&
222
- compareVersions(supportedSafariVersion, browser.version) >= 0
223
+ compareVersions(browser.version, supportedSafariVersion) >= 0
223
224
  ) {
224
225
  return true;
225
226
  } else {
@@ -282,6 +283,14 @@ export function getDevicePixelRatio(): number {
282
283
  return 1;
283
284
  }
284
285
 
286
+ /**
287
+ * @param v1 - The first version string to compare.
288
+ * @param v2 - The second version string to compare.
289
+ * @returns A number indicating the order of the versions:
290
+ * - 1 if v1 is greater than v2
291
+ * - -1 if v1 is less than v2
292
+ * - 0 if v1 and v2 are equal
293
+ */
285
294
  export function compareVersions(v1: string, v2: string): number {
286
295
  const parts1 = v1.split('.');
287
296
  const parts2 = v2.split('.');
@@ -1,11 +0,0 @@
1
- export declare class SifGuard {
2
- private consecutiveSifCount;
3
- private sifSequenceStartedAt;
4
- private lastSifReceivedAt;
5
- private userFramesSinceSif;
6
- recordSif(): void;
7
- recordUserFrame(): void;
8
- isSifAllowed(): boolean;
9
- reset(): void;
10
- }
11
- //# sourceMappingURL=SifGuard.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"SifGuard.d.ts","sourceRoot":"","sources":["../../../../src/e2ee/worker/SifGuard.ts"],"names":[],"mappings":"AAEA,qBAAa,QAAQ;IACnB,OAAO,CAAC,mBAAmB,CAAK;IAEhC,OAAO,CAAC,oBAAoB,CAAqB;IAEjD,OAAO,CAAC,iBAAiB,CAAa;IAEtC,OAAO,CAAC,kBAAkB,CAAa;IAEvC,SAAS;IAMT,eAAe;IAgBf,YAAY;IAQZ,KAAK;CAKN"}
@@ -1,11 +0,0 @@
1
- export declare class SifGuard {
2
- private consecutiveSifCount;
3
- private sifSequenceStartedAt;
4
- private lastSifReceivedAt;
5
- private userFramesSinceSif;
6
- recordSif(): void;
7
- recordUserFrame(): void;
8
- isSifAllowed(): boolean;
9
- reset(): void;
10
- }
11
- //# sourceMappingURL=SifGuard.d.ts.map
@@ -1,47 +0,0 @@
1
- import { MAX_SIF_COUNT, MAX_SIF_DURATION } from '../constants';
2
-
3
- export class SifGuard {
4
- private consecutiveSifCount = 0;
5
-
6
- private sifSequenceStartedAt: number | undefined;
7
-
8
- private lastSifReceivedAt: number = 0;
9
-
10
- private userFramesSinceSif: number = 0;
11
-
12
- recordSif() {
13
- this.consecutiveSifCount += 1;
14
- this.sifSequenceStartedAt ??= Date.now();
15
- this.lastSifReceivedAt = Date.now();
16
- }
17
-
18
- recordUserFrame() {
19
- if (this.sifSequenceStartedAt === undefined) {
20
- return;
21
- } else {
22
- this.userFramesSinceSif += 1;
23
- }
24
- if (
25
- // reset if we received more user frames than SIFs
26
- this.userFramesSinceSif > this.consecutiveSifCount ||
27
- // also reset if we got a new user frame and the latest SIF frame hasn't been updated in a while
28
- Date.now() - this.lastSifReceivedAt > MAX_SIF_DURATION
29
- ) {
30
- this.reset();
31
- }
32
- }
33
-
34
- isSifAllowed() {
35
- return (
36
- this.consecutiveSifCount < MAX_SIF_COUNT &&
37
- (this.sifSequenceStartedAt === undefined ||
38
- Date.now() - this.sifSequenceStartedAt < MAX_SIF_DURATION)
39
- );
40
- }
41
-
42
- reset() {
43
- this.userFramesSinceSif = 0;
44
- this.consecutiveSifCount = 0;
45
- this.sifSequenceStartedAt = undefined;
46
- }
47
- }