homebridge-unifi-protect 5.5.4 → 6.0.1

This diff represents the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +3 -3
  2. package/config.schema.json +17 -16
  3. package/dist/index.d.ts +3 -0
  4. package/dist/index.js +6 -6
  5. package/dist/index.js.map +1 -1
  6. package/dist/protect-camera.d.ts +58 -0
  7. package/dist/protect-camera.js +367 -246
  8. package/dist/protect-camera.js.map +1 -1
  9. package/dist/protect-device.d.ts +48 -0
  10. package/dist/protect-device.js +189 -0
  11. package/dist/protect-device.js.map +1 -0
  12. package/dist/protect-doorbell.d.ts +22 -0
  13. package/dist/protect-doorbell.js +75 -64
  14. package/dist/protect-doorbell.js.map +1 -1
  15. package/dist/protect-ffmpeg-record.d.ts +15 -0
  16. package/dist/protect-ffmpeg-record.js +48 -34
  17. package/dist/protect-ffmpeg-record.js.map +1 -1
  18. package/dist/protect-ffmpeg-stream.d.ts +15 -0
  19. package/dist/protect-ffmpeg-stream.js +22 -12
  20. package/dist/protect-ffmpeg-stream.js.map +1 -1
  21. package/dist/protect-ffmpeg.d.ts +42 -0
  22. package/dist/protect-ffmpeg.js +49 -58
  23. package/dist/protect-ffmpeg.js.map +1 -1
  24. package/dist/protect-light.d.ts +13 -0
  25. package/dist/protect-light.js +63 -40
  26. package/dist/protect-light.js.map +1 -1
  27. package/dist/protect-liveviews.d.ts +17 -0
  28. package/dist/protect-liveviews.js +117 -101
  29. package/dist/protect-liveviews.js.map +1 -1
  30. package/dist/protect-mqtt.d.ts +19 -0
  31. package/dist/protect-mqtt.js +26 -35
  32. package/dist/protect-mqtt.js.map +1 -1
  33. package/dist/protect-nvr-events.d.ts +30 -0
  34. package/dist/protect-nvr-events.js +168 -431
  35. package/dist/protect-nvr-events.js.map +1 -1
  36. package/dist/protect-nvr-systeminfo.d.ts +15 -0
  37. package/dist/protect-nvr-systeminfo.js +43 -49
  38. package/dist/protect-nvr-systeminfo.js.map +1 -1
  39. package/dist/protect-nvr.d.ts +48 -0
  40. package/dist/protect-nvr.js +327 -359
  41. package/dist/protect-nvr.js.map +1 -1
  42. package/dist/protect-options.d.ts +39 -0
  43. package/dist/protect-options.js +172 -6
  44. package/dist/protect-options.js.map +1 -1
  45. package/dist/protect-platform.d.ts +17 -0
  46. package/dist/protect-platform.js +17 -30
  47. package/dist/protect-platform.js.map +1 -1
  48. package/dist/protect-record.d.ts +33 -0
  49. package/dist/protect-record.js +130 -126
  50. package/dist/protect-record.js.map +1 -1
  51. package/dist/protect-rtp.d.ts +29 -0
  52. package/dist/protect-rtp.js +133 -16
  53. package/dist/protect-rtp.js.map +1 -1
  54. package/dist/protect-securitysystem.d.ts +18 -0
  55. package/dist/protect-securitysystem.js +105 -109
  56. package/dist/protect-securitysystem.js.map +1 -1
  57. package/dist/protect-sensor.d.ts +28 -0
  58. package/dist/protect-sensor.js +79 -97
  59. package/dist/protect-sensor.js.map +1 -1
  60. package/dist/protect-stream.d.ts +41 -0
  61. package/dist/protect-stream.js +298 -156
  62. package/dist/protect-stream.js.map +1 -1
  63. package/dist/protect-timeshift.d.ts +30 -0
  64. package/dist/protect-timeshift.js +65 -48
  65. package/dist/protect-timeshift.js.map +1 -1
  66. package/dist/protect-types.d.ts +50 -0
  67. package/dist/protect-types.js +22 -0
  68. package/dist/protect-types.js.map +1 -0
  69. package/dist/protect-viewer.d.ts +17 -0
  70. package/dist/protect-viewer.js +41 -47
  71. package/dist/protect-viewer.js.map +1 -1
  72. package/dist/settings.d.ts +22 -0
  73. package/dist/settings.js +30 -35
  74. package/dist/settings.js.map +1 -1
  75. package/homebridge-ui/public/index.html +715 -0
  76. package/homebridge-ui/server.js +156 -0
  77. package/package.json +15 -15
  78. package/dist/protect-accessory.js +0 -184
  79. package/dist/protect-accessory.js.map +0 -1
@@ -1,45 +1,37 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ProtectStreamingDelegate = void 0;
- const unifi_protect_1 = require("unifi-protect");
- const settings_1 = require("./settings");
- const protect_ffmpeg_stream_1 = require("./protect-ffmpeg-stream");
- const protect_record_1 = require("./protect-record");
- const protect_rtp_1 = require("./protect-rtp");
- const ws_1 = __importDefault(require("ws"));
- const events_1 = __importDefault(require("events"));
- const ffmpeg_for_homebridge_1 = __importDefault(require("ffmpeg-for-homebridge"));
- const camera_utils_1 = require("@homebridge/camera-utils");
- // Increase the listener limits to support Protect installations with more than 10 cameras. 100 seems like a reasonable default.
- // eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-unsafe-member-access
- require("events").EventEmitter.defaultMaxListeners = 100;
+ import { PROTECT_FFMPEG_AUDIO_FILTER_FFTNR, PROTECT_FFMPEG_AUDIO_FILTER_HIGHPASS, PROTECT_FFMPEG_AUDIO_FILTER_LOWPASS, PROTECT_HKSV_SEGMENT_LENGTH, PROTECT_HKSV_TIMESHIFT_BUFFER_MAXLENGTH, PROTECT_SNAPSHOT_CACHE_MAXAGE } from "./settings.js";
+ import { FetchError } from "unifi-protect";
+ import { FfmpegStreamingProcess } from "./protect-ffmpeg-stream.js";
+ import { ProtectRecordingDelegate } from "./protect-record.js";
+ import { RtpDemuxer } from "./protect-rtp.js";
+ import WebSocket from "ws";
+ import events from "node:events";
+ import ffmpegPath from "ffmpeg-for-homebridge";
+ import { platform } from "node:process";
  // Camera streaming delegate implementation for Protect.
- class ProtectStreamingDelegate {
+ export class ProtectStreamingDelegate {
+ // Create an instance of a HomeKit streaming delegate.
  constructor(protectCamera, resolutions) {
  this.api = protectCamera.api;
  this.config = protectCamera.platform.config;
- this.debug = protectCamera.platform.debug.bind(protectCamera.platform);
  this.hap = protectCamera.api.hap;
  this.hksv = null;
- this.log = protectCamera.platform.log;
- this.name = protectCamera.name.bind(protectCamera);
+ this.log = protectCamera.log;
  this.nvr = protectCamera.nvr;
  this.ongoingSessions = {};
  this.protectCamera = protectCamera;
  this.pendingSessions = {};
  this.platform = protectCamera.platform;
+ this.probesizeOverride = 0;
+ this.probesizeOverrideCount = 0;
  this.rtspEntry = null;
  this.savedBitrate = 0;
  this.snapshotCache = {};
  this.verboseFfmpeg = false;
- this.videoEncoder = this.config.videoEncoder || "libx264";
- this.videoProcessor = this.config.videoProcessor || ffmpeg_for_homebridge_1.default || "ffmpeg";
+ this.videoEncoderOptions = this.getVideoEncoderOptions();
+ this.videoProcessor = this.config.videoProcessor || ffmpegPath || "ffmpeg";
  // Setup for HKSV, if enabled.
- if (this.protectCamera.isHksv) {
- this.hksv = new protect_record_1.ProtectRecordingDelegate(protectCamera);
+ if (this.protectCamera.hasHksv) {
+ this.hksv = new ProtectRecordingDelegate(protectCamera);
  }
  // Setup for our camera controller.
  const options = {
@@ -48,15 +40,15 @@ class ProtectStreamingDelegate {
  // Our streaming delegate - aka us.
  delegate: this,
  // Our recording capabilities for HomeKit Secure Video.
- recording: !this.protectCamera.isHksv ? undefined : {
+ recording: !this.protectCamera.hasHksv ? undefined : {
  delegate: this.hksv,
  options: {
  audio: {
  codecs: [
  {
  // Protect supports a 48 KHz sampling rate, and the low complexity AAC profile.
- samplerate: 5 /* this.api.hap.AudioRecordingSamplerate.KHZ_48 */,
- type: 0 /* this.api.hap.AudioRecordingCodecType.AAC_LC */
+ samplerate: 5 /* AudioRecordingSamplerate.KHZ_48 */,
+ type: 0 /* AudioRecordingCodecType.AAC_LC */
  }
  ]
  },
@@ -64,21 +56,21 @@ class ProtectStreamingDelegate {
  {
  // The default HKSV segment length is 4000ms. It turns out that any setting less than that will disable
  // HomeKit Secure Video.
- fragmentLength: settings_1.PROTECT_HKSV_SEGMENT_LENGTH,
- type: 0 /* this.api.hap.MediaContainerType.FRAGMENTED_MP4 */
+ fragmentLength: PROTECT_HKSV_SEGMENT_LENGTH,
+ type: 0 /* MediaContainerType.FRAGMENTED_MP4 */
  }
  ],
  // Maximum prebuffer length supported. In Protect, this is effectively unlimited, but HomeKit only seems to
  // request a maximum of a 4000ms prebuffer.
- prebufferLength: settings_1.PROTECT_HKSV_BUFFER_LENGTH,
+ prebufferLength: PROTECT_HKSV_TIMESHIFT_BUFFER_MAXLENGTH,
  video: {
  parameters: {
  // Through admittedly anecdotal testing on various G3 and G4 models, UniFi Protect seems to support
  // only the H.264 Main profile, though it does support various H.264 levels, ranging from Level 3
  // through Level 5.1 (G4 Pro at maximum resolution). However, HomeKit only supports Level 3.1, 3.2,
  // and 4.0 currently.
- levels: [0 /* this.hap.H264Level.LEVEL3_1 */, 1 /* this.hap.H264Level.LEVEL3_2 */, 2 /* this.hap.H264Level.LEVEL4_0 */],
- profiles: [1 /* this.hap.H264Profile.MAIN */]
+ levels: [0 /* H264Level.LEVEL3_1 */, 1 /* H264Level.LEVEL3_2 */, 2 /* H264Level.LEVEL4_0 */],
+ profiles: [1 /* H264Profile.MAIN */]
  },
  resolutions: resolutions,
  type: 0 /* this.api.hap.VideoCodecType.H264 */
@@ -86,18 +78,20 @@ class ProtectStreamingDelegate {
  }
  },
  // Our motion sensor.
- sensors: !this.protectCamera.isHksv ? undefined : {
+ sensors: !this.protectCamera.hasHksv ? undefined : {
  motion: this.protectCamera.accessory.getService(this.hap.Service.MotionSensor)
  },
  streamingOptions: {
  audio: {
  codecs: [
  {
- samplerate: 16 /* AudioStreamingSamplerate.KHZ_16 */,
+ audioChannels: 1,
+ bitrate: 0,
+ samplerate: 24 /* AudioStreamingSamplerate.KHZ_24 */,
  type: "AAC-eld" /* AudioStreamingCodecType.AAC_ELD */
  }
  ],
- twoWayAudio: this.protectCamera.twoWayAudio
+ twoWayAudio: this.protectCamera.hints.twoWayAudio
  },
  supportedCryptoSuites: [0 /* this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80 */],
  video: {
@@ -106,8 +100,8 @@ class ProtectStreamingDelegate {
  // only the H.264 Main profile, though it does support various H.264 levels, ranging from Level 3
  // through Level 5.1 (G4 Pro at maximum resolution). However, HomeKit only supports Level 3.1, 3.2,
  // and 4.0 currently.
- levels: [0 /* this.hap.H264Level.LEVEL3_1 */, 1 /* this.hap.H264Level.LEVEL3_2 */, 2 /* this.hap.H264Level.LEVEL4_0 */],
- profiles: [1 /* this.hap.H264Profile.MAIN */]
+ levels: [0 /* H264Level.LEVEL3_1 */, 1 /* H264Level.LEVEL3_2 */, 2 /* H264Level.LEVEL4_0 */],
+ profiles: [1 /* H264Profile.MAIN */]
  },
  // Retrieve the list of supported resolutions from the camera and apply our best guesses for how to
  // map specific resolutions to the available RTSP streams on a camera. Unfortunately, this creates
@@ -122,12 +116,11 @@ class ProtectStreamingDelegate {
  }
  // HomeKit image snapshot request handler.
  async handleSnapshotRequest(request, callback) {
- var _a;
  const snapshot = await this.getSnapshot(request);
  // No snapshot was returned - we're done here.
  if (!snapshot) {
  if (callback) {
- callback(new Error(this.name() + ": Unable to retrieve a snapshot"));
+ callback(new Error(this.protectCamera.name + ": Unable to retrieve a snapshot"));
  }
  return;
  }
@@ -136,39 +129,66 @@ class ProtectStreamingDelegate {
  callback(undefined, snapshot);
  }
  // Publish the snapshot as a data URL to MQTT, if configured.
- (_a = this.nvr.mqtt) === null || _a === void 0 ? void 0 : _a.publish(this.protectCamera.accessory, "snapshot", "data:image/jpeg;base64," + snapshot.toString("base64"));
+ this.nvr.mqtt?.publish(this.protectCamera.accessory, "snapshot", "data:image/jpeg;base64," + snapshot.toString("base64"));
  }
  // Prepare to launch the video stream.
  async prepareStream(request, callback) {
- const cameraConfig = this.protectCamera.accessory.context.device;
+ let reservePortFailed = false;
+ const rtpPortReservations = [];
+ // We use this utility to identify errors in reserving UDP ports for our use.
+ const reservePort = async (ipFamily = "ipv4", portCount = 1) => {
+ // If we've already failed, don't keep trying to find more ports.
+ if (reservePortFailed) {
+ return -1;
+ }
+ // Retrieve the ports we're looking for.
+ const assignedPort = await this.platform.rtpPorts.reservePort(ipFamily, portCount);
+ // We didn't get the ports we requested.
+ if (assignedPort === -1) {
+ reservePortFailed = true;
+ }
+ else {
+ // Add this reservation the list of ports we've successfully requested.
+ rtpPortReservations.push(assignedPort);
+ if (portCount === 2) {
+ rtpPortReservations.push(assignedPort + 1);
+ }
+ }
+ // Return them.
+ return assignedPort;
+ };
  // Check if audio support is enabled.
- const isAudioEnabled = this.nvr.optionEnabled(cameraConfig, "Audio", true, request.targetAddress);
+ const isAudioEnabled = this.nvr.optionEnabled(this.protectCamera.ufp, "Audio", true, request.targetAddress);
  // We need to check for AAC support because it's going to determine whether we support audio.
- const hasLibFdk = isAudioEnabled && (await protect_ffmpeg_stream_1.FfmpegStreamingProcess.codecEnabled(this.videoProcessor, "libfdk_aac", this.log));
+ const hasLibFdk = isAudioEnabled && (await FfmpegStreamingProcess.codecEnabled(this.videoProcessor, "libfdk_aac", this.log));
  // Setup our audio plumbing.
- const audioIncomingRtcpPort = (await (0, camera_utils_1.reservePorts)({ count: 1 }))[0];
- const audioIncomingPort = (hasLibFdk && this.protectCamera.twoWayAudio) ? (await (0, camera_utils_1.reservePorts)({ count: 1 }))[0] : -1;
- const audioIncomingRtpPort = (hasLibFdk && this.protectCamera.twoWayAudio) ? (await (0, camera_utils_1.reservePorts)({ count: 2 }))[0] : -1;
+ const audioIncomingRtcpPort = (await reservePort(request.addressVersion));
+ const audioIncomingPort = (hasLibFdk && this.protectCamera.hints.twoWayAudio) ? (await reservePort(request.addressVersion)) : -1;
+ const audioIncomingRtpPort = (hasLibFdk && this.protectCamera.hints.twoWayAudio) ? (await reservePort(request.addressVersion, 2)) : -1;
  const audioSSRC = this.hap.CameraController.generateSynchronisationSource();
  if (!hasLibFdk) {
- this.log.info("%s: Audio support disabled.%s", this.name(), isAudioEnabled ? " A version of FFmpeg that is compiled with fdk_aac support is required to support audio." : "");
+ this.log.info("Audio support disabled.%s", isAudioEnabled ? " A version of FFmpeg that is compiled with fdk_aac support is required to support audio." : "");
  }
  let rtpDemuxer = null;
  let talkBack = null;
- if (hasLibFdk && this.protectCamera.twoWayAudio) {
+ if (hasLibFdk && this.protectCamera.hints.twoWayAudio) {
  // Setup the RTP demuxer for two-way audio scenarios.
- rtpDemuxer = new protect_rtp_1.RtpDemuxer(this, request.addressVersion, audioIncomingPort, audioIncomingRtcpPort, audioIncomingRtpPort);
+ rtpDemuxer = new RtpDemuxer(this, request.addressVersion, audioIncomingPort, audioIncomingRtcpPort, audioIncomingRtpPort);
  // Request the talkback websocket from the controller.
- const params = new URLSearchParams({ camera: cameraConfig.id });
- talkBack = await this.nvr.nvrApi.getWsEndpoint(this.nvr.nvrApi.wsUrl() + "/talkback?" + params.toString());
+ const params = new URLSearchParams({ camera: this.protectCamera.ufp.id });
+ talkBack = await this.nvr.ufpApi.getWsEndpoint("talkback", params);
  // Something went wrong and we don't have a talkback websocket.
  if (!talkBack) {
- this.log.error("%s: Unable to open the return audio channel.", this.name());
+ this.log.error("Unable to open the return audio channel.");
  }
  }
  // Setup our video plumbing.
- const videoReturnPort = (await (0, camera_utils_1.reservePorts)({ count: 1 }))[0];
+ const videoReturnPort = (await reservePort(request.addressVersion));
  const videoSSRC = this.hap.CameraController.generateSynchronisationSource();
+ // If we've had failures to retrieve the UDP ports we're looking for, inform the user.
+ if (reservePortFailed) {
+ this.log.error("Unable to reserve the UDP ports needed to begin streaming.");
+ }
  const sessionInfo = {
  address: request.targetAddress,
  addressVersion: request.addressVersion,
@@ -180,6 +200,7 @@ class ProtectStreamingDelegate {
  audioSSRC: audioSSRC,
  hasLibFdk: hasLibFdk,
  rtpDemuxer: rtpDemuxer,
+ rtpPortReservations: rtpPortReservations,
  talkBack: talkBack,
  videoCryptoSuite: request.video.srtpCryptoSuite,
  videoPort: request.video.port,
@@ -192,7 +213,7 @@ class ProtectStreamingDelegate {
  // it simple and don't use a demuxer.
  const response = {
  audio: {
- port: (hasLibFdk && this.protectCamera.twoWayAudio) ? audioIncomingPort : audioIncomingRtcpPort,
+ port: (hasLibFdk && this.protectCamera.hints.twoWayAudio) ? audioIncomingPort : audioIncomingRtcpPort,
  // eslint-disable-next-line camelcase
  srtp_key: request.audio.srtp_key,
  // eslint-disable-next-line camelcase
@@ -214,26 +235,25 @@ class ProtectStreamingDelegate {
  }
  // Launch the Protect video (and audio) stream.
  async startStream(request, callback) {
- var _a, _b, _c, _d, _e, _f;
- const cameraConfig = this.protectCamera.accessory.context.device;
  const sessionInfo = this.pendingSessions[request.sessionID];
  const sdpIpVersion = sessionInfo.addressVersion === "ipv6" ? "IP6 " : "IP4";
  // If we aren't connected, we're done.
- if (cameraConfig.state !== "CONNECTED") {
+ if (this.protectCamera.ufp.state !== "CONNECTED") {
  const errorMessage = "Unable to start video stream: the camera is offline or unavailable.";
- this.log.error("%s: %s", this.name(), errorMessage);
- callback(new Error(this.name() + ": " + errorMessage));
+ this.log.error(errorMessage);
+ callback(new Error(this.protectCamera.name + ": " + errorMessage));
  return;
  }
  // Find the best RTSP stream based on what we're looking for.
- this.rtspEntry = this.protectCamera.findRtsp(request.video.width, request.video.height, cameraConfig, sessionInfo.address);
+ this.rtspEntry = this.protectCamera.findRtsp(request.video.width, request.video.height, this.protectCamera.ufp, sessionInfo.address);
  if (!this.rtspEntry) {
  const errorMessage = "Unable to start video stream: no valid RTSP stream profile was found.";
- this.log.error("%s: %s %sx%s, %s fps, %s kbps.", this.name(), errorMessage, request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate);
- callback(new Error(this.name() + ": " + errorMessage));
+ this.log.error("%s %sx%s, %s fps, %s kbps.", errorMessage, request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate);
+ callback(new Error(this.protectCamera.name + ": " + errorMessage));
  return;
  }
- // Save our current bitrate before we modify it, but only if we're the first stream to catch concurrent streaming clients.
+ // Save our current bitrate before we modify it, but only if we're the first stream - we don't want to do this for
+ // concurrent streaming clients for this camera.
  if (!this.savedBitrate) {
  this.savedBitrate = this.protectCamera.getBitrate(this.rtspEntry.channel.id);
  if (this.savedBitrate < 0) {
@@ -243,8 +263,14 @@ class ProtectStreamingDelegate {
  // Set the desired bitrate in Protect. We don't need to for this to return, because Protect
  // will adapt the stream once it processes the configuration change.
  await this.protectCamera.setBitrate(this.rtspEntry.channel.id, request.video.max_bit_rate * 1000);
- // Are we transcoding?
- const isTranscoding = this.nvr.optionEnabled(cameraConfig, "Video.Transcode", false, sessionInfo.address);
+ // Has the user explicitly configured transcoding, or are we a high latency session (e.g. cellular)? If we're high latency, we'll transcode
+ // by default unless the user has asked us not to. Why? It generally results in a speedier experience, at the expense of some stream quality
+ // (HomeKit tends to request far lower bitrates than Protect is capable of producing).
+ //
+ // How do we determine if we're a high latency connection? We look at the RTP packet time of the audio packet time for a hint. HomeKit uses values
+ // of 20, 30, 40, and 60ms. We make an assumption, validated by lots of real-world testing, that when we see 60ms used by HomeKit, it's a
+ // high latency connection and act accordingly.
+ const isTranscoding = this.protectCamera.hints.transcode || ((request.audio.packet_time >= 60) && this.protectCamera.hints.transcodeHighLatency);
  // Set our packet size to be 564. Why? MPEG transport stream (TS) packets are 188 bytes in size each.
  // These packets transmit the video data that you ultimately see on your screen and are transmitted using
  // UDP. Each UDP packet is 1316 bytes in size, before being encapsulated in IP. We want to get as many
@@ -267,9 +293,8 @@ class ProtectStreamingDelegate {
  const videomtu = 188 * 3;
  const audiomtu = 188 * 1;
  // -hide_banner Suppress printing the startup banner in FFmpeg.
- // -probesize 2048 How many bytes should be analyzed for stream information. We default to to analyze time should be spent analyzing
- the input stream, in microseconds.
- // -max_delay 500000 Set an upper limit on how much time FFmpeg can take in demuxing packets.
+ // -probesize number How many bytes should be analyzed for stream information.
+ // -max_delay 500000 Set an upper limit on how much time FFmpeg can take in demuxing packets, in microseconds.
  // -r fps Set the input frame rate for the video stream.
  // -rtsp_transport tcp Tell the RTSP stream handler that we're looking for a TCP connection.
  // -i this.rtspEntry.url RTSPS URL to get our input stream from.
@@ -277,10 +302,11 @@ class ProtectStreamingDelegate {
  // and video tracks in opposite locations from where FFmpeg typically expects them. This
  // setting is a more general solution than naming the track locations directly in case
  // Protect changes this in the future.
- // Yes, we included these above as well: they need to be included for every I/O stream to maximize effectiveness it seems.
+ //
+ // Yes, we included these above as well: they need to be included for each I/O stream to maximize effectiveness it seems.
  const ffmpegArgs = [
  "-hide_banner",
- "-probesize", "16384",
+ "-probesize", this.probesizeOverride ? this.probesizeOverride.toString() : this.protectCamera.hints.probesize.toString(),
  "-max_delay", "500000",
  "-r", this.rtspEntry.channel.fps.toString(),
  "-rtsp_transport", "tcp",
@@ -288,23 +314,21 @@ class ProtectStreamingDelegate {
  "-map", "0:v:0"
  ];
  // Inform the user.
- this.log.info("%s: Streaming request from %s: %sx%s@%sfps, %s kbps. %s %s, %s kbps.", this.name(), sessionInfo.address, request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate, isTranscoding ? "Transcoding" : "Using", this.rtspEntry.name, this.rtspEntry.channel.bitrate / 1000);
+ this.log.info("Streaming request from %s%s: %sx%s@%sfps, %s kbps. %s %s, %s kbps. Audio packet time = %s", sessionInfo.address, (request.audio.packet_time === 60) ? " (high latency connection)" : "", request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate, isTranscoding ? (this.protectCamera.hasHwAccel ? "Hardware accelerated transcoding" : "Transcoding") : "Using", this.rtspEntry.name, this.rtspEntry.channel.bitrate / 1000, request.audio.packet_time);
  // Check to see if we're transcoding. If we are, set the right FFmpeg encoder options. If not, copy the video stream.
  if (isTranscoding) {
  // Configure our video parameters for transcoding:
  //
- // -vcodec libx264 Copy the stream withour reencoding it.
- // -pix_fmt yuvj420p Use the yuvj420p pixel format, which is what Protect uses.
  // -profile:v high Use the H.264 high profile when encoding, which provides for better stream quality and size efficiency.
- // -preset veryfast Use the veryfast encoding preset in libx264, which provides a good balance of encoding speed and quality.
+ // -level:v high Use the H.264 profile level that HomeKit is requesting when encoding.
  // -bf 0 Disable B-frames when encoding to increase compatibility against occasionally finicky HomeKit clients.
  // -b:v bitrate The average bitrate to use for this stream. This is specified by HomeKit.
  // -bufsize size This is the decoder buffer size, which drives the variability / quality of the output bitrate.
- // -maxrate bitrate The maximum bitrate tolerance, used with -bufsize. We set this to max_bit_rate to effectively
+ // -maxrate bitrate The maximum bitrate tolerance, used with -bufsize. We set this with max_bit_rate to effectively
  // create a constant bitrate.
  // -filter:v fps=fps= Use the fps filter to get to the frame rate requested by HomeKit. This has better performance characteristics
  // for Protect rather than using "-r".
- ffmpegArgs.push("-vcodec", this.videoEncoder, "-pix_fmt", "yuvj420p", "-profile:v", "high", "-preset", "veryfast", "-bf", "0", "-b:v", request.video.max_bit_rate.toString() + "k", "-bufsize", (2 * request.video.max_bit_rate).toString() + "k", "-maxrate", request.video.max_bit_rate.toString() + "k", "-filter:v", "fps=fps=" + request.video.fps.toString());
+ ffmpegArgs.push(...this.videoEncoderOptions, "-profile:v", this.getH264Profile(request.video.profile), "-level:v", this.getH264Level(request.video.level), "-bf", "0", "-b:v", request.video.max_bit_rate.toString() + "k", "-bufsize", (2 * request.video.max_bit_rate).toString() + "k", "-maxrate", request.video.max_bit_rate.toString() + "k", "-filter:v", "fps=fps=" + request.video.fps.toString());
  }
  else {
  // Configure our video parameters for just copying the input stream from Protect - it tends to be quite solid in most cases:
@@ -341,15 +365,15 @@ class ProtectStreamingDelegate {
  // -ac 1 Set the number of audio channels to 1.
  if (sessionInfo.hasLibFdk) {
  // Configure our audio parameters.
- ffmpegArgs.push("-map", "0:a:0", "-acodec", "libfdk_aac", "-profile:a", "aac_eld", "-flags", "+global_header", "-f", "null", "-ar", request.audio.sample_rate.toString() + "k", "-b:a", request.audio.max_bit_rate.toString() + "k", "-bufsize", (2 * request.audio.max_bit_rate).toString() + "k", "-ac", "1");
+ ffmpegArgs.push("-map", "0:a:0", "-acodec", "libfdk_aac", "-profile:a", "aac_eld", "-flags", "+global_header", "-f", "null", "-ar", request.audio.sample_rate.toString() + "k", "-afterburner", "1", "-eld_sbr", "1", "-eld_v2", "1", "-b:a", request.audio.max_bit_rate.toString() + "k", "-bufsize", (2 * request.audio.max_bit_rate).toString() + "k", "-ac", "1");
  // If we are audio filtering, address it here.
- if (this.nvr.optionEnabled(cameraConfig, "Audio.Filter.Noise", false, sessionInfo.address)) {
+ if (this.nvr.optionEnabled(this.protectCamera.ufp, "Audio.Filter.Noise", false, sessionInfo.address)) {
  const afOptions = [];
  // See what the user has set for the afftdn filter for this camera.
- let fftNr = parseFloat((_a = this.nvr.optionGet(cameraConfig, "Audio.Filter.Noise.FftNr", sessionInfo.address)) !== null && _a !== void 0 ? _a : "");
+ let fftNr = parseFloat(this.nvr.optionGet(this.protectCamera.ufp, "Audio.Filter.Noise.FftNr", sessionInfo.address) ?? "");
  // If we have an invalid setting, use the defaults.
  if ((fftNr !== fftNr) || (fftNr < 0.01) || (fftNr > 97)) {
- fftNr = (fftNr > 97) ? 97 : ((fftNr < 0.01) ? 0.01 : settings_1.PROTECT_FFMPEG_AUDIO_FILTER_FFTNR);
+ fftNr = (fftNr > 97) ? 97 : ((fftNr < 0.01) ? 0.01 : PROTECT_FFMPEG_AUDIO_FILTER_FFTNR);
  }
  // nt=w Focus on eliminating white noise.
  // om=o Output the filtered audio.
@@ -357,21 +381,23 @@ class ProtectStreamingDelegate {
  // tr=1 Enable residual tracking.
  // nr=X Noise reduction value in decibels.
  afOptions.push("afftdn=nt=w:om=o:tn=1:tr=1:nr=" + fftNr.toString());
- let highpass = (_b = this.nvr.optionGet(cameraConfig, "Audio.Filter.Noise.HighPass", sessionInfo.address)) !== null && _b !== void 0 ? _b : (this.nvr.optionEnabled(cameraConfig, "Audio.Filter.Noise.HighPass", false) ? settings_1.PROTECT_FFMPEG_AUDIO_FILTER_HIGHPASS.toString() : undefined);
- let lowpass = (_c = this.nvr.optionGet(cameraConfig, "Audio.Filter.Noise.LowPass", sessionInfo.address)) !== null && _c !== void 0 ? _c : (this.nvr.optionEnabled(cameraConfig, "Audio.Filter.Noise.LowPass", false) ? settings_1.PROTECT_FFMPEG_AUDIO_FILTER_LOWPASS.toString() : undefined);
+ let highpass = this.nvr.optionGet(this.protectCamera.ufp, "Audio.Filter.Noise.HighPass", sessionInfo.address) ??
+ (this.nvr.optionEnabled(this.protectCamera.ufp, "Audio.Filter.Noise.HighPass", false) ? PROTECT_FFMPEG_AUDIO_FILTER_HIGHPASS.toString() : undefined);
+ let lowpass = this.nvr.optionGet(this.protectCamera.ufp, "Audio.Filter.Noise.LowPass", sessionInfo.address) ??
+ (this.nvr.optionEnabled(this.protectCamera.ufp, "Audio.Filter.Noise.LowPass", false) ? PROTECT_FFMPEG_AUDIO_FILTER_LOWPASS.toString() : undefined);
  // Only set the highpass and lowpass filters if the user has explicitly enabled them.
  if ((highpass !== undefined) || (lowpass !== undefined)) {
  // See what the user has set for the highpass filter for this camera.
- highpass = parseInt(highpass !== null && highpass !== void 0 ? highpass : "");
+ highpass = parseInt(highpass ?? "");
  // If we have an invalid setting, use the defaults.
  if ((highpass !== highpass) || (highpass < 0)) {
- highpass = settings_1.PROTECT_FFMPEG_AUDIO_FILTER_HIGHPASS;
+ highpass = PROTECT_FFMPEG_AUDIO_FILTER_HIGHPASS;
  }
  // See what the user has set for the highpass filter for this camera.
- lowpass = parseInt(lowpass !== null && lowpass !== void 0 ? lowpass : "");
+ lowpass = parseInt(lowpass ?? "");
  // If we have an invalid setting, use the defaults.
  if ((lowpass !== lowpass) || (lowpass < 0)) {
- lowpass = settings_1.PROTECT_FFMPEG_AUDIO_FILTER_LOWPASS;
+ lowpass = PROTECT_FFMPEG_AUDIO_FILTER_LOWPASS;
  }
  afOptions.push("highpass=f=" + highpass.toString(), "lowpass=f=" + lowpass.toString());
  }
@@ -396,12 +422,16 @@ class ProtectStreamingDelegate {
  ffmpegArgs.push("-loglevel", "level+debug");
  }
  // Combine everything and start an instance of FFmpeg.
- const ffmpegStream = new protect_ffmpeg_stream_1.FfmpegStreamingProcess(this, request.sessionID, ffmpegArgs, (sessionInfo.hasLibFdk && this.protectCamera.twoWayAudio) ? undefined : { addressVersion: sessionInfo.addressVersion, port: sessionInfo.videoReturnPort }, callback);
+ const ffmpegStream = new FfmpegStreamingProcess(this, request.sessionID, ffmpegArgs, (sessionInfo.hasLibFdk && this.protectCamera.hints.twoWayAudio) ? undefined : { addressVersion: sessionInfo.addressVersion, port: sessionInfo.videoReturnPort }, callback);
  // Some housekeeping for our FFmpeg and demuxer sessions.
- this.ongoingSessions[request.sessionID] = { ffmpeg: [ffmpegStream], rtpDemuxer: sessionInfo.rtpDemuxer };
+ this.ongoingSessions[request.sessionID] = {
+ ffmpeg: [ffmpegStream],
+ rtpDemuxer: sessionInfo.rtpDemuxer,
+ rtpPortReservations: sessionInfo.rtpPortReservations
+ };
  delete this.pendingSessions[request.sessionID];
  // If we aren't doing two-way audio, we're done here. For two-way audio...we have some more plumbing to do.
- if (!sessionInfo.hasLibFdk || !this.protectCamera.twoWayAudio) {
+ if (!sessionInfo.hasLibFdk || !this.protectCamera.hints.twoWayAudio) {
  return;
  }
  // Session description protocol message that FFmpeg will share with HomeKit.
@@ -416,19 +446,19 @@ class ProtectStreamingDelegate {
  // t Timestamps for the start and end of the session.
  // m Media type - audio, adhering to RTP/AVP, payload type 110.
  // b Bandwidth information - application specific, 24k.
- // a=rtpmap Payload type 110 corresponds to an MP4 stream.
+ // a=rtpmap Payload type 110 corresponds to an MP4 stream. Format is MPEG4-GENERIC/<audio clock rate>/<audio channels>
  // a=fmtp For payload type 110, use these format parameters.
  // a=crypto Crypto suite to use for this session.
  const sdpReturnAudio = [
  "v=0",
  "o=- 0 0 IN " + sdpIpVersion + " 127.0.0.1",
- "s=" + this.name() + " Audio Talkback",
+ "s=" + this.protectCamera.name + " Audio Talkback",
  "c=IN " + sdpIpVersion + " " + sessionInfo.address,
  "t=0 0",
  "m=audio " + sessionInfo.audioIncomingRtpPort.toString() + " RTP/AVP 110",
  "b=AS:24",
- "a=rtpmap:110 MPEG4-GENERIC/16000/1",
- "a=fmtp:110 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; config=F8F0212C00BC00",
+ "a=rtpmap:110 MPEG4-GENERIC/24000/1",
+ "a=fmtp:110 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; config=F8EC212C00BC00",
  "a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:" + sessionInfo.audioSRTP.toString("base64")
  ].join("\n");
  // Configure the audio portion of the command line, if we have a version of FFmpeg supports libfdk_aac. Options we use are:
@@ -438,11 +468,13 @@ class ProtectStreamingDelegate {
  // -f sdp Specify that our input will be an SDP file.
  // -acodec libfdk_aac Decode AAC input.
  // -i pipe:0 Read input from standard input.
- // -acodec aac Encode to AAC. This is set by Protect.
+ // -acodec libfdk_aac Encode to AAC. This format is set by Protect.
  // -flags +global_header Sets the global header in the bitstream.
- // -ar samplerate Sample rate to use for this audio. This is specified by Protect.
- // -b:a bitrate Bitrate to use for this audio. This is specified by Protect.
- // -ac 1 Set the number of audio channels to 1. This is specified by Protect.
+ // -afterburner 1 Increases audio quality at the expense of needing a little bit more computational power in libfdk_aac.
+ // -eld_sbr 1 Use spectral band replication to further enhance audio.
+ // -eld_v2 1 Use the enhanced low delay v2 standard for better audio characteristics.
+ // -af Use the aformat audio filter to set the channel layout to mono and use the Protect-provided sample
+ // rate to produce the right audio needed for talkback.
  // -f adts Transmit an ADTS stream.
  // pipe:1 Output the ADTS stream to standard output.
  const ffmpegReturnAudioCmd = [
@@ -451,11 +483,12 @@ class ProtectStreamingDelegate {
  "-f", "sdp",
  "-acodec", "libfdk_aac",
  "-i", "pipe:0",
+ "-acodec", "libfdk_aac",
  "-flags", "+global_header",
- "-b:a", cameraConfig.talkbackSettings.bitsPerSample.toString() + "k",
- "-ac", cameraConfig.talkbackSettings.channels.toString(),
- "-ar", cameraConfig.talkbackSettings.samplingRate.toString(),
- "-loglevel", "level+verbose",
+ "-afterburner", "1",
+ "-eld_sbr", "1",
+ "-eld_v2", "1",
+ "-af", "aformat=channel_layouts=mono:sample_rates=" + this.protectCamera.ufp.talkbackSettings.samplingRate.toString(),
  "-f", "adts",
  "pipe:1"
  ];
@@ -474,68 +507,67 @@ class ProtectStreamingDelegate {
  let openListener;
  if (sessionInfo.talkBack) {
  // Open the talkback connection.
- ws = new ws_1.default(sessionInfo.talkBack, { rejectUnauthorized: false });
+ ws = new WebSocket(sessionInfo.talkBack, { rejectUnauthorized: false });
  isTalkbackLive = true;
  // Catch any errors and inform the user, if needed.
- ws === null || ws === void 0 ? void 0 : ws.once("error", (error) => {
+ ws?.once("error", (error) => {
  // Ignore timeout errors, but notify the user about anything else.
  if (error.code !== "ETIMEDOUT") {
- this.log.error("%s: Error in communicating with the return audio channel: %s", this.name(), error);
+ this.log.error("Error in communicating with the return audio channel: %s", error);
  }
- ws === null || ws === void 0 ? void 0 : ws.terminate();
+ ws?.terminate();
  });
  // Catch any stray open events after we've closed.
- ws === null || ws === void 0 ? void 0 : ws.on("open", openListener = () => {
+ ws?.on("open", openListener = () => {
  // If we've somehow opened after we've wrapped up talkback, terminate the connection.
  if (!isTalkbackLive) {
- ws === null || ws === void 0 ? void 0 : ws.terminate();
+ ws?.terminate();
  }
  });
  // Cleanup after ourselves on close.
- ws === null || ws === void 0 ? void 0 : ws.once("close", () => {
- ws === null || ws === void 0 ? void 0 : ws.removeListener("open", openListener);
+ ws?.once("close", () => {
+ ws?.removeListener("open", openListener);
  });
  }
  // Wait for the first RTP packet to be received before trying to launch FFmpeg.
  if (sessionInfo.rtpDemuxer) {
- await events_1.default.once(sessionInfo.rtpDemuxer, "rtp");
+ await events.once(sessionInfo.rtpDemuxer, "rtp");
  // If we've already closed the RTP demuxer, we're done here,
  if (!sessionInfo.rtpDemuxer.isRunning) {
  // Clean up our talkback websocket.
- ws === null || ws === void 0 ? void 0 : ws.terminate();
+ ws?.terminate();
  return;
  }
  }
  // Fire up FFmpeg and start processing the incoming audio.
- const ffmpegReturnAudio = new protect_ffmpeg_stream_1.FfmpegStreamingProcess(this, request.sessionID, ffmpegReturnAudioCmd);
+ const ffmpegReturnAudio = new FfmpegStreamingProcess(this, request.sessionID, ffmpegReturnAudioCmd);
  // Setup housekeeping for the twoway FFmpeg session.
  this.ongoingSessions[request.sessionID].ffmpeg.push(ffmpegReturnAudio);
  // Feed the SDP session description to FFmpeg on stdin.
- (_d = ffmpegReturnAudio.stdin) === null || _d === void 0 ? void 0 : _d.end(sdpReturnAudio + "\n");
+ ffmpegReturnAudio.stdin?.end(sdpReturnAudio + "\n");
  // Send the audio.
- (_e = ffmpegReturnAudio.stdout) === null || _e === void 0 ? void 0 : _e.on("data", dataListener = (data) => {
- ws === null || ws === void 0 ? void 0 : ws.send(data, (error) => {
+ ffmpegReturnAudio.stdout?.on("data", dataListener = (data) => {
+ ws?.send(data, (error) => {
  // This happens when an error condition is encountered on sending data to the websocket.
  // We assume the worst and close our talkback channel.
  if (error) {
- ws === null || ws === void 0 ? void 0 : ws.terminate();
+ ws?.terminate();
  }
  });
  });
  // Make sure we terminate the talkback websocket when we're done.
- (_f = ffmpegReturnAudio.ffmpegProcess) === null || _f === void 0 ? void 0 : _f.once("exit", () => {
- var _a;
+ ffmpegReturnAudio.ffmpegProcess?.once("exit", () => {
  // Make sure we catch any stray connections that may be too slow to open.
  isTalkbackLive = false;
  // Close the websocket.
- if (((ws === null || ws === void 0 ? void 0 : ws.readyState) === ws_1.default.CLOSING) || ((ws === null || ws === void 0 ? void 0 : ws.readyState) === ws_1.default.OPEN)) {
- ws === null || ws === void 0 ? void 0 : ws.terminate();
+ if ((ws?.readyState === WebSocket.CLOSING) || (ws?.readyState === WebSocket.OPEN)) {
+ ws?.terminate();
  }
- (_a = ffmpegReturnAudio.stdout) === null || _a === void 0 ? void 0 : _a.removeListener("data", dataListener);
+ ffmpegReturnAudio.stdout?.removeListener("data", dataListener);
  });
  }
  catch (error) {
- this.log.error("%s: Unable to connect to the return audio channel: %s", this.name(), error);
+ this.log.error("Unable to connect to the return audio channel: %s", error);
  }
  }
  // Process incoming stream requests.
@@ -546,7 +578,7 @@ class ProtectStreamingDelegate {
  break;
  case "reconfigure" /* StreamRequestTypes.RECONFIGURE */:
  // Once FFmpeg is updated to support this, we'll enable this one.
- this.log.info("%s: Streaming parameters adjustment requested by HomeKit: %sx%s, %s fps, %s kbps.", this.name(), request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate);
+ this.log.info("Streaming parameters adjustment requested by HomeKit: %sx%s, %s fps, %s kbps.", request.video.width, request.video.height, request.video.fps, request.video.max_bit_rate);
  // Set the desired bitrate in Protect.
  if (this.rtspEntry) {
  await this.protectCamera.setBitrate(this.rtspEntry.channel.id, request.video.max_bit_rate * 1000);
@@ -563,71 +595,83 @@ class ProtectStreamingDelegate {
  // Retrieve a cached snapshot, if available.
  getCachedSnapshot(cameraMac) {
  // If we have an image from the last few seconds, we can use it. Otherwise, we're done.
- if (!this.snapshotCache[cameraMac] || ((Date.now() - this.snapshotCache[cameraMac].time) > (60 * 1000))) {
+ if (!this.snapshotCache[cameraMac] || ((Date.now() - this.snapshotCache[cameraMac].time) > (PROTECT_SNAPSHOT_CACHE_MAXAGE * 1000))) {
  delete this.snapshotCache[cameraMac];
  return null;
  }
  return this.snapshotCache[cameraMac].image;
  }
  // Take a snapshot.
- async getSnapshot(request) {
- const cameraConfig = this.protectCamera.accessory.context.device;
- const params = new URLSearchParams({ force: "true" });
+ async getSnapshot(request, isLoggingErrors = true) {
+ const logError = (message, ...parameters) => {
+ // We don't need to log errors for snapshot cache refreshes.
+ if (isLoggingErrors) {
+ this.log.error(message, ...parameters);
+ }
+ };
+ const params = new URLSearchParams({ ts: Date.now().toString() });
  // If we aren't connected, we're done.
- if (cameraConfig.state !== "CONNECTED") {
- this.log.error("%s: Unable to retrieve a snapshot: the camera is offline or unavailable.", this.name());
+ if (this.protectCamera.ufp.state !== "CONNECTED") {
+ logError("Unable to retrieve a snapshot: the camera is offline or unavailable.");
  return null;
  }
  // If we have details of the snapshot request, use it to request the right size.
  if (request) {
- params.append("width", request.width.toString());
- params.append("height", request.height.toString());
+ params.append("h", request.height.toString());
+ params.append("w", request.width.toString());
+ }
+ // Don't log the inevitable API errors related to response delays from the Protect controller.
+ const savedLogState = this.nvr.logApiErrors;
+ if (!isLoggingErrors) {
+ this.nvr.logApiErrors = false;
  }
  // Request the image from the controller.
- const response = await this.nvr.nvrApi.fetch(this.protectCamera.snapshotUrl + "?" + params.toString(), { method: "GET" }, true, false);
- // Occasional snapshot failures will happen. The controller isn't always able to generate them if
- // it's already generating one, or it's requested too quickly after the last one.
- if (!(response === null || response === void 0 ? void 0 : response.ok)) {
+ const response = await this.nvr.ufpApi.fetch(this.protectCamera.snapshotUrl + "?" + params.toString(), { method: "GET" }, true, false);
+ if (!isLoggingErrors) {
+ this.nvr.logApiErrors = savedLogState;
+ }
+ // Occasional snapshot failures will happen. The controller isn't always able to generate them if it's already generating one,
+ // or it's requested too quickly after the last one.
+ if (!response?.ok) {
  // See if we have an image cached that we can use instead.
- const cachedSnapshot = this.getCachedSnapshot(cameraConfig.mac);
+ const cachedSnapshot = this.getCachedSnapshot(this.protectCamera.ufp.mac);
  if (cachedSnapshot) {
- this.log.error("%s: Unable to retrieve a snapshot. Using the most recent cached snapshot instead.", this.name());
+ logError("Unable to retrieve a snapshot. Using the most recent cached snapshot instead.");
  return cachedSnapshot;
  }
- this.log.error("%s: Unable to retrieve a snapshot.%s", this.name(), response ? " " + response.status.toString() + " - " + response.statusText + "." : "");
+ logError("Unable to retrieve a snapshot.%s", response ? " " + response.status.toString() + " - " + response.statusText + "." : "");
  return null;
  }
  try {
  // Retrieve the image.
- this.snapshotCache[cameraConfig.mac] = { image: Buffer.from(await response.arrayBuffer()), time: Date.now() };
- return this.snapshotCache[cameraConfig.mac].image;
+ this.snapshotCache[this.protectCamera.ufp.mac] = { image: Buffer.from(await response.arrayBuffer()), time: Date.now() };
+ return this.snapshotCache[this.protectCamera.ufp.mac].image;
  }
  catch (error) {
- if (error instanceof unifi_protect_1.FetchError) {
+ if (error instanceof FetchError) {
  let cachedSnapshot;
  switch (error.code) {
  case "ERR_STREAM_PREMATURE_CLOSE":
- cachedSnapshot = this.getCachedSnapshot(cameraConfig.mac);
+ cachedSnapshot = this.getCachedSnapshot(this.protectCamera.ufp.mac);
  if (cachedSnapshot) {
- this.log.error("%s: Unable to retrieve a snapshot. Using a cached snapshot instead.", this.name());
+ logError("Unable to retrieve a snapshot. Using a cached snapshot instead.");
  return cachedSnapshot;
  }
- this.log.error("%s: Unable to retrieve a snapshot: the Protect controller closed the connection prematurely.", this.name());
+ logError("Unable to retrieve a snapshot: the Protect controller closed the connection prematurely.");
  return null;
  break;
  default:
- this.log.error("%s: Unknown error: %s", this.name(), error.message);
+ this.log.error("Unknown error: %s", error.message);
  return null;
  break;
  }
  }
- this.log.error("%s: An error occurred while making a snapshot request: %s.", this.name(), error);
+ this.log.error("An error occurred while making a snapshot request: %s.", error);
  return null;
  }
  }
  // Close a video stream.
  async stopStream(sessionId) {
- var _a, _b;
  try {
  // Stop any FFmpeg instances we have running.
  if (this.ongoingSessions[sessionId]) {
@@ -635,18 +679,25 @@ class ProtectStreamingDelegate {
  ffmpegProcess.stop();
  }
  // Close the demuxer, if we have one.
- (_a = this.ongoingSessions[sessionId].rtpDemuxer) === null || _a === void 0 ? void 0 : _a.close();
+ this.ongoingSessions[sessionId].rtpDemuxer?.close();
  // Inform the user.
- this.log.info("%s: Stopped video streaming session.", this.name());
+ this.log.info("Stopped video streaming session.");
+ // Release our port reservations.
+ this.ongoingSessions[sessionId].rtpPortReservations.map(x => this.platform.rtpPorts.freePort(x));
+ }
+ // On the off chance we were signaled to prepare to start streaming, but never actually started streaming, cleanup after ourselves.
+ if (this.pendingSessions[sessionId]) {
+ // Release our port reservations.
+ this.pendingSessions[sessionId].rtpPortReservations.map(x => this.platform.rtpPorts.freePort(x));
  }
  // Delete the entries.
  delete this.pendingSessions[sessionId];
  delete this.ongoingSessions[sessionId];
  // If we've completed all streaming sessions, restore any changed settings, such as bitrate, for HomeKit Secure Video.
  if (!this.ongoingSessions.length) {
- if ((_b = this.hksv) === null || _b === void 0 ? void 0 : _b.isRecording) {
- // Restore HKSV settings.
- await this.hksv.updateRecordingActive(this.hksv.isRecording);
+ if (this.hksv?.isRecording) {
+ // Restart the timeshift buffer now that we've stopped streaming.
+ await this.hksv.restartTimeshifting();
  }
  else if (this.savedBitrate) {
  // Restore our original bitrate.
@@ -658,7 +709,7 @@ class ProtectStreamingDelegate {
  }
  }
  catch (error) {
- this.log.error("%s: Error occurred while ending the FFmpeg video processes: %s.", this.name(), error);
+ this.log.error("Error occurred while ending the FFmpeg video processes: %s.", error);
  }
  }
  // Shutdown all our video streams.
@@ -668,6 +719,97 @@ class ProtectStreamingDelegate {
  await this.stopStream(session);
  }
  }
+ // Adjust our probe hints.
+ adjustProbeSize() {
+ if (this.probesizeOverrideTimeout) {
+ clearTimeout(this.probesizeOverrideTimeout);
+ this.probesizeOverrideTimeout = undefined;
+ }
+ // Maintain statistics on how often we need to adjust our probesize. If this happens too frequently, we will default to a working value.
+ this.probesizeOverrideCount++;
+ // Increase the probesize by a factor of two each time we need to do something about it. This idea is to balance the latency implications
+ // for the user, but also ensuring we have a functional streaming experience.
+ this.probesizeOverride = (this.probesizeOverride ? this.probesizeOverride : this.protectCamera.hints.probesize) * 2;
+ // Safety check to make sure this never gets too crazy.
+ if (this.probesizeOverride > 5000000) {
+ this.probesizeOverride = 5000000;
+ }
+ this.log.error("The FFmpeg process ended unexpectedly due to issues with the media stream provided by the UniFi Protect livestream API. " +
+ "Adjusting the settings we use for FFmpeg %s to use safer values at the expense of some additional streaming startup latency.", this.probesizeOverrideCount < 10 ? "temporarily" : "permanently");
+ // If this happens often enough, keep the override in place permanently.
+ if (this.probesizeOverrideCount < 10) {
+ this.probesizeOverrideTimeout = setTimeout(() => {
+ this.probesizeOverride = 0;
+ this.probesizeOverrideTimeout = undefined;
+ }, 1000 * 60 * 10);
+ }
+ }
+ // Translate HomeKit H.264 level information for FFmpeg.
+ getH264Level(level) {
+ switch (level) {
+ case 0 /* H264Level.LEVEL3_1 */:
+ return "3.1";
+ break;
+ case 1 /* H264Level.LEVEL3_2 */:
+ return "3.2";
+ break;
+ case 2 /* H264Level.LEVEL4_0 */:
+ return "4.0";
+ break;
+ default:
+ return "3.1";
+ break;
+ }
+ }
+ // Translate HomeKit H.264 profile information for FFmpeg.
+ getH264Profile(profile) {
+ switch (profile) {
+ case 0 /* H264Profile.BASELINE */:
+ return "baseline";
+ break;
+ case 2 /* H264Profile.HIGH */:
+ return "high";
+ break;
+ case 1 /* H264Profile.MAIN */:
+ return "main";
+ break;
+ default:
+ return "main";
+ break;
+ }
+ }
+ // Determine the video encoder to use when transcoding.
+ getVideoEncoderOptions() {
+ // Default to the tried-and-true libx264. We use the following options by default:
+ //
+ // -pix_fmt yuvj420p Use the yuvj420p pixel format, which is what Protect uses.
+ // -preset veryfast Use the veryfast encoding preset in libx264, which provides a good balance of encoding
+ // speed and quality.
+ let encoder = "libx264";
+ let encoderOptions = "-pix_fmt yuvj420p -preset veryfast";
+ // If the user has specified a video encoder, let's use it.
+ if (this.config.videoEncoder) {
+ encoder = this.config.videoEncoder;
+ }
+ // If we've enabled hardware-accelerated transcoding, Let's deduce what we are running on, and select encoder options accordingly.
+ if (this.protectCamera.hints.hardwareTranscoding) {
+ this.protectCamera.hasHwAccel = true;
+ switch (platform) {
+ case "darwin":
+ // h264_videotoolbox is the macOS hardware encoder API. We use the following options by default:
+ //
+ // -pix_fmt nv12 videotoolbox doesn't support the full yuvj420p pixel format, so we use nv12 to get us close.
+ // -coder cabac Use the cabac encoder for better video quality with the encoding profiles we use in HBUP.
+ encoder = "h264_videotoolbox";
+ encoderOptions = "-pix_fmt nv12 -coder cabac";
+ break;
+ default:
+ // Back to software encoding.
+ this.protectCamera.hasHwAccel = false;
+ break;
+ }
+ }
+ return ["-vcodec", encoder, ...encoderOptions.split(" ")];
+ }
  }
- exports.ProtectStreamingDelegate = ProtectStreamingDelegate;
  //# sourceMappingURL=protect-stream.js.map