homebridge-nest-accfactory 0.3.0 → 0.3.2

This diff shows the changes between publicly released versions of the package as they appear in their public registries, and is provided for informational purposes only.
@@ -8,34 +8,34 @@
8
8
  import EventEmitter from 'node:events';
9
9
  import { Buffer } from 'node:buffer';
10
10
  import { setTimeout, clearTimeout } from 'node:timers';
11
- import process from 'node:process';
12
- import child_process from 'node:child_process';
13
- import net from 'node:net';
14
11
  import dgram from 'node:dgram';
15
12
  import fs from 'node:fs';
16
13
  import path from 'node:path';
17
- import { fileURLToPath } from 'node:url';
18
14
 
19
15
  // Define our modules
20
16
  import HomeKitDevice from '../HomeKitDevice.js';
17
+ import Streamer from '../streamer.js';
21
18
  import NexusTalk from '../nexustalk.js';
22
19
  import WebRTC from '../webrtc.js';
20
+ import FFmpeg from '../ffmpeg.js';
21
+ import { processCommonData, parseDurationToSeconds, scaleValue } from '../utils.js';
23
22
 
24
- const CAMERARESOURCE = {
25
- offline: 'Nest_camera_offline.jpg',
26
- off: 'Nest_camera_off.jpg',
27
- transfer: 'Nest_camera_transfer.jpg',
23
+ // Define constants
24
+ import { DATA_SOURCE, PROTOBUF_RESOURCES, __dirname, RESOURCE_PATH, RESOURCE_IMAGES, DEVICE_TYPE, TIMERS } from '../consts.js';
25
+
26
+ const MP4BOX = 'mp4box';
27
+ const STREAMING_PROTOCOL = {
28
+ WEBRTC: 'PROTOCOL_WEBRTC',
29
+ NEXUSTALK: 'PROTOCOL_NEXUSTALK',
28
30
  };
29
- const MP4BOX = 'mp4box'; // MP4 box fragement event for HKSV recording
30
- const SNAPSHOTCACHETIMEOUT = 30000; // Timeout for retaining snapshot image (in milliseconds)
31
- const PROTOCOLWEBRTC = 'PROTOCOL_WEBRTC';
32
- const PROTOCOLNEXUSTALK = 'PROTOCOL_NEXUSTALK';
33
- const RESOURCEPATH = '../res';
34
- const __dirname = path.dirname(fileURLToPath(import.meta.url)); // Make a defined for JS __dirname
35
31
 
36
32
  export default class NestCamera extends HomeKitDevice {
37
33
  static TYPE = 'Camera';
38
- static VERSION = '2025.06.12';
34
+ static VERSION = '2025.08.04'; // Code version
35
+
36
+ // For messaging back to parent class (Doorbell/Floodlight)
37
+ static SET = HomeKitDevice.SET;
38
+ static GET = HomeKitDevice.GET;
39
39
 
40
40
  controller = undefined; // HomeKit Camera/Doorbell controller service
41
41
  streamer = undefined; // Streamer object for live/recording stream
@@ -44,24 +44,23 @@ export default class NestCamera extends HomeKitDevice {
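The hunk below swaps the loose `#hkSessions` array for a `#liveSessions` Map keyed by HomeKit session ID, which the later prepareStream()/handleStreamRequest() changes rely on. A minimal sketch of that tracking pattern, with illustrative function names (only the Map operations and the `rtpSplitter` field come from this diff):

```js
// Sketch: per-session state lives in a Map keyed by the HomeKit sessionID.
const liveSessions = new Map();

function addSession(sessionID, sessionInfo) {
  // sessionInfo holds ports, SRTP material and (later) the rtpSplitter socket
  liveSessions.set(sessionID, sessionInfo);
}

function endSession(sessionID) {
  liveSessions.get(sessionID)?.rtpSplitter?.close?.();
  liveSessions.delete(sessionID);
}

function endAllSessions() {
  // Mirrors the teardown in onRemove()/onShutdown(): close any RTP splitter sockets
  liveSessions.forEach((session) => session?.rtpSplitter?.close?.());
  liveSessions.clear();
}
```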
44
44
  operatingModeService = undefined; // Link to camera/doorbell operating mode service
45
45
  personTimer = undefined; // Cooldown timer for person/face events
46
46
  motionTimer = undefined; // Cooldown timer for motion events
47
- snapshotTimer = undefined; // Timer for cached snapshot images
48
- lastSnapshotImage = undefined; // JPG image buffer for last camera snapshot
49
47
  snapshotEvent = undefined; // Event for which to get snapshot for
48
+ ffmpeg = undefined; // FFMpeg object class
50
49
 
51
50
  // Internal data only for this class
52
- #hkSessions = []; // Track live and recording active sessions
51
+ #liveSessions = new Map(); // Track active HomeKit live stream sessions (port, crypto, rtpSplitter)
53
52
  #recordingConfig = {}; // HomeKit Secure Video recording configuration
54
- #cameraOfflineImage = undefined; // JPG image buffer for camera offline
55
- #cameraVideoOffImage = undefined; // JPG image buffer for camera video off
56
- #cameraTransferringImage = undefined; // JPG image buffer for camera transferring between Nest/Google Home
53
+ #cameraImages = {}; // Snapshot resource images
54
+ #snapshotTimer = undefined; // Timer for cached snapshot images
55
+ #lastSnapshotImage = undefined; // JPG image buffer for last camera snapshot
57
56
 
58
- constructor(accessory, api, log, eventEmitter, deviceData) {
59
- super(accessory, api, log, eventEmitter, deviceData);
57
+ constructor(accessory, api, log, deviceData) {
58
+ super(accessory, api, log, deviceData);
60
59
 
61
- // Load supporrt image files as required
62
- const loadImageIfExists = (filename, label) => {
60
+ // Load support image files as required
61
+ const loadImageResource = (filename, label) => {
63
62
  let buffer = undefined;
64
- let file = path.resolve(__dirname, RESOURCEPATH, filename);
63
+ let file = path.resolve(__dirname, RESOURCE_PATH, filename);
65
64
  if (fs.existsSync(file) === true) {
66
65
  buffer = fs.readFileSync(file);
67
66
  } else {
@@ -70,25 +69,36 @@ export default class NestCamera extends HomeKitDevice {
70
69
  return buffer;
71
70
  };
72
71
 
73
- this.#cameraOfflineImage = loadImageIfExists(CAMERARESOURCE.offline, 'offline');
74
- this.#cameraVideoOffImage = loadImageIfExists(CAMERARESOURCE.off, 'video off');
75
- this.#cameraTransferringImage = loadImageIfExists(CAMERARESOURCE.transfer, 'transferring');
72
+ this.#cameraImages = {
73
+ offline: loadImageResource(RESOURCE_IMAGES.CAMERA_OFFLINE, 'offline'),
74
+ off: loadImageResource(RESOURCE_IMAGES.CAMERA_OFF, 'video off'),
75
+ transfer: loadImageResource(RESOURCE_IMAGES.CAMERA_TRANSFER, 'transferring'),
76
+ };
77
+
78
+ // Create ffmpeg object if we have been given a valid binary
79
+ if (typeof this.deviceData?.ffmpeg?.binary === 'string' && this.deviceData?.ffmpeg?.valid === true) {
80
+ this.ffmpeg = new FFmpeg(this.deviceData?.ffmpeg?.binary, log);
81
+ }
76
82
  }
77
83
 
78
84
  // Class functions
79
- setupDevice(hapController = this.hap.CameraController) {
80
- // Setup motion services
85
+ onAdd() {
86
+ // Set up motion services. This needs to be done before we set up the HomeKit camera controller
81
87
  if (this.motionServices === undefined) {
82
88
  this.createCameraMotionServices();
83
89
  }
84
90
 
85
- // Setup HomeKit camera/doorbell controller
86
- if (this.controller === undefined && typeof hapController === 'function') {
87
- // Need to cleanup the CameraOperatingMode service. This is to allow seamless configuration
88
- // switching between enabling hksv or not
89
- // Thanks to @bcullman (Brad Ullman) for catching this
90
- this.accessory.removeService(this.accessory.getService(this.hap.Service.CameraOperatingMode));
91
- this.controller = new hapController(this.generateControllerOptions());
91
+ // Setup HomeKit camera controller
92
+ // Need to cleanup the CameraOperatingMode service. This is to allow seamless configuration
93
+ // switching between enabling hksv or not
94
+ // Thanks to @bcullman (Brad Ullman) for catching this
95
+ this.accessory.removeService(this.accessory.getService(this.hap.Service.CameraOperatingMode));
96
+ if (this.controller === undefined) {
97
+ // Establish the "camera" controller here
98
+ this.controller = new this.hap.CameraController(this.generateControllerOptions());
99
+ }
100
+ if (this.controller !== undefined) {
101
+ // Configure the controller that's been created
92
102
  this.accessory.configureController(this.controller);
93
103
  }
94
104
 
@@ -110,7 +120,7 @@ export default class NestCamera extends HomeKitDevice {
110
120
  (value === true && this.deviceData.statusled_brightness !== 0) ||
111
121
  (value === false && this.deviceData.statusled_brightness !== 1)
112
122
  ) {
113
- this.set({ uuid: this.deviceData.nest_google_uuid, statusled_brightness: value === true ? 0 : 1 });
123
+ this.message(HomeKitDevice.SET, { uuid: this.deviceData.nest_google_uuid, statusled_brightness: value === true ? 0 : 1 });
114
124
  this?.log?.info?.('Recording status LED on "%s" was turned', this.deviceData.description, value === true ? 'on' : 'off');
115
125
  }
116
126
  },
@@ -125,7 +135,10 @@ export default class NestCamera extends HomeKitDevice {
125
135
  onSet: (value) => {
126
136
  // only change IRLed status value if different than on-device
127
137
  if ((value === false && this.deviceData.irled_enabled === true) || (value === true && this.deviceData.irled_enabled === false)) {
128
- this.set({ uuid: this.deviceData.nest_google_uuid, irled_enabled: value === true ? 'auto_on' : 'always_off' });
138
+ this.message(HomeKitDevice.SET, {
139
+ uuid: this.deviceData.nest_google_uuid,
140
+ irled_enabled: value === true ? 'auto_on' : 'always_off',
141
+ });
129
142
 
130
143
  this?.log?.info?.('Night vision on "%s" was turned', this.deviceData.description, value === true ? 'on' : 'off');
131
144
  }
@@ -145,7 +158,7 @@ export default class NestCamera extends HomeKitDevice {
145
158
  (this.deviceData.streaming_enabled === true && value === true)
146
159
  ) {
147
160
  // Camera state does not reflect requested state, so fix
148
- this.set({ uuid: this.deviceData.nest_google_uuid, streaming_enabled: value === false ? true : false });
161
+ this.message(HomeKitDevice.SET, { uuid: this.deviceData.nest_google_uuid, streaming_enabled: value === false ? true : false });
149
162
  this?.log?.info?.('Camera on "%s" was turned', this.deviceData.description, value === false ? 'on' : 'off');
150
163
  }
151
164
  }
@@ -175,7 +188,7 @@ export default class NestCamera extends HomeKitDevice {
175
188
  (this.deviceData.audio_enabled === true && value === this.hap.Characteristic.RecordingAudioActive.DISABLE) ||
176
189
  (this.deviceData.audio_enabled === false && value === this.hap.Characteristic.RecordingAudioActive.ENABLE)
177
190
  ) {
178
- this.set({
191
+ this.message(HomeKitDevice.SET, {
179
192
  uuid: this.deviceData.nest_google_uuid,
180
193
  audio_enabled: value === this.hap.Characteristic.RecordingAudioActive.ENABLE ? true : false,
181
194
  });
@@ -201,10 +214,10 @@ export default class NestCamera extends HomeKitDevice {
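This hunk folds the old PROTOCOLWEBRTC/PROTOCOLNEXUSTALK strings into the STREAMING_PROTOCOL constant and verifies that at least one advertised protocol has a usable module. The same constants drive streamer selection in onUpdate() further down; a condensed sketch of that selection, assuming the imports shown at the top of this file (the standalone function framing is illustrative):

```js
import NexusTalk from '../nexustalk.js';
import WebRTC from '../webrtc.js';

const STREAMING_PROTOCOL = {
  WEBRTC: 'PROTOCOL_WEBRTC',
  NEXUSTALK: 'PROTOCOL_NEXUSTALK',
};

// Sketch: pick a streamer for the protocols a camera advertises.
// The class itself assigns this.streamer inside onUpdate(); the ordering here is illustrative.
function createStreamer(uuid, deviceData, log) {
  if (deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.WEBRTC) === true && WebRTC !== undefined) {
    return new WebRTC(uuid, deviceData, { log: log });
  }
  if (deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.NEXUSTALK) === true && NexusTalk !== undefined) {
    return new NexusTalk(uuid, deviceData, { log: log });
  }
  return undefined; // no usable protocol: streaming and recording stay unavailable
}
```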
201
214
  }
202
215
 
203
216
  if (
204
- (this.deviceData.streaming_protocols.includes(PROTOCOLWEBRTC) === false &&
205
- this.deviceData.streaming_protocols.includes(PROTOCOLNEXUSTALK) === false) ||
206
- (this.deviceData.streaming_protocols.includes(PROTOCOLWEBRTC) === true && WebRTC === undefined) ||
207
- (this.deviceData.streaming_protocols.includes(PROTOCOLNEXUSTALK) === true && NexusTalk === undefined)
217
+ (this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.WEBRTC) === false &&
218
+ this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.NEXUSTALK) === false) ||
219
+ (this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.WEBRTC) === true && WebRTC === undefined) ||
220
+ (this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.NEXUSTALK) === true && NexusTalk === undefined)
208
221
  ) {
209
222
  this?.log?.error?.(
210
223
  'No suitable streaming protocol is present for "%s". Streaming and recording will be unavailable',
@@ -212,46 +225,37 @@ export default class NestCamera extends HomeKitDevice {
212
225
  );
213
226
  }
214
227
 
215
- // Setup linkage to EveHome app if configured todo so
216
- if (
217
- this.deviceData?.eveHistory === true &&
218
- typeof this.motionServices?.[1]?.service === 'object' &&
219
- typeof this.historyService?.linkToEveHome === 'function'
220
- ) {
221
- this.historyService.linkToEveHome(this.motionServices[1].service, {
222
- description: this.deviceData.description,
223
- });
224
- }
225
-
226
228
  // Extra setup details for output
227
- this.deviceData.hksv === true &&
228
- this.postSetupDetail('HomeKit Secure Video support' + (this.streamer?.isBuffering() === true ? ' and recording buffer started' : ''));
229
- this.deviceData.localAccess === true && this.postSetupDetail('Local access');
229
+ this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.WEBRTC) === true &&
230
+ WebRTC !== undefined &&
231
+ this.postSetupDetail('WebRTC streamer', 'debug');
232
+ this.deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.NEXUSTALK) === true &&
233
+ NexusTalk !== undefined &&
234
+ this.postSetupDetail('NexusTalk streamer', 'debug');
235
+ this.deviceData.hksv === true && this.postSetupDetail('HomeKit Secure Video support');
236
+ this.deviceData.localAccess === true && this.postSetupDetail('Local network access');
237
+ this.deviceData.ffmpeg.hwaccel === true && this.postSetupDetail('Video hardware acceleration');
230
238
  }
231
239
 
232
- removeDevice() {
240
+ onRemove() {
233
241
  // Clean up our camera object since this device is being removed
234
242
  clearTimeout(this.motionTimer);
235
243
  clearTimeout(this.personTimer);
236
- clearTimeout(this.snapshotTimer);
244
+ clearTimeout(this.#snapshotTimer);
237
245
  this.motionTimer = undefined;
238
246
  this.personTimer = undefined;
239
- this.snapshotTimer = undefined;
247
+ this.#snapshotTimer = undefined;
240
248
 
241
- this.streamer !== undefined && this.streamer.stopEverything();
249
+ // Stop all streamer logic (buffering, output, etc)
250
+ this.streamer?.stopEverything?.();
251
+
252
+ // Terminate any remaining ffmpeg sessions for this camera/doorbell
253
+ this.ffmpeg?.killAllSessions?.(this.uuid);
242
254
 
243
255
  // Stop any on-going HomeKit sessions, either live or recording
244
- // We'll terminate any ffmpeg, rtpSpliter etc processes
245
- this.#hkSessions.forEach((session) => {
246
- if (typeof session.rtpSplitter?.close === 'function') {
247
- session.rtpSplitter.close();
248
- }
249
- session.ffmpeg.forEach((ffmpeg) => {
250
- ffmpeg.kill('SIGKILL');
251
- });
252
- if (session?.eventEmitter instanceof EventEmitter === true) {
253
- session.eventEmitter.removeAllListeners(MP4BOX);
254
- }
256
+ // We'll terminate any ffmpeg, rtpSplitter etc processes
257
+ this.#liveSessions?.forEach?.((session) => {
258
+ session?.rtpSplitter?.close?.();
255
259
  });
256
260
 
257
261
  // Remove any motion services we created
@@ -263,344 +267,616 @@ export default class NestCamera extends HomeKitDevice {
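Much of this hunk is the rewritten HKSV recording path: ffmpeg now emits fragmented MP4 on stdout, and the class slices that byte stream into ISO-BMFF boxes (a 4-byte big-endian size that includes the 8-byte header, then a 4-byte type) before yielding MOOF/MDAT pairs to HomeKit. A minimal standalone sketch of that parsing step, with an illustrative function name:

```js
import { Buffer } from 'node:buffer';

// Sketch: accumulate stream chunks and split out complete ISO-BMFF (fMP4) boxes.
// Box layout: [4-byte big-endian size][4-byte type][payload]; the size covers the 8-byte header too.
function parseMp4Boxes(pending, chunk) {
  let buffer = Buffer.concat([pending, chunk]);
  let boxes = [];

  while (buffer.length >= 8) {
    let boxSize = buffer.readUInt32BE(0);
    if (boxSize < 8 || buffer.length < boxSize) {
      break; // incomplete box; wait for more data
    }
    boxes.push({
      header: buffer.subarray(0, 8),
      type: buffer.subarray(4, 8).toString(),
      data: buffer.subarray(8, boxSize),
    });
    buffer = buffer.subarray(boxSize);
  }

  return { boxes, remainder: buffer };
}
```

In the class below the same logic runs inside the ffmpeg stdout handler, and each completed box raises the MP4BOX event so the async generator in handleRecordingStreamRequest() can wake up and yield the next segment.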
263
267
  // Remove the camera controller
264
268
  this.accessory.removeController(this.controller);
265
269
 
270
+ // Clear references
266
271
  this.operatingModeService = undefined;
267
- this.#hkSessions = undefined;
272
+ this.#liveSessions = undefined;
268
273
  this.motionServices = undefined;
269
274
  this.streamer = undefined;
270
275
  this.controller = undefined;
271
276
  }
272
277
 
273
- // Taken and adapted from:
274
- // https://github.com/hjdhjd/homebridge-unifi-protect/blob/eee6a4e379272b659baa6c19986d51f5bf2cbbbc/src/protect-ffmpeg-record.ts
275
- async *handleRecordingStreamRequest(sessionID) {
276
- if (this.deviceData?.ffmpeg?.binary === undefined) {
277
- this?.log?.warn?.(
278
- 'Received request to start recording for "%s" however we do not have an ffmpeg binary present',
279
- this.deviceData.description,
280
- );
281
- return;
282
- }
283
-
284
- if (
285
- this.motionServices?.[1]?.service !== undefined &&
286
- this.motionServices[1].service.getCharacteristic(this.hap.Characteristic.MotionDetected).value === false
287
- ) {
288
- // Should only be recording if motion detected.
289
- // Sometimes when starting up, HAP-nodeJS or HomeKit triggers this even when motion isn't occuring
290
- this?.log?.debug?.('Received request to commence recording for "%s" however we have not detected any motion');
291
- return;
292
- }
293
-
294
- if (this.streamer === undefined) {
295
- this?.log?.error?.(
296
- 'Received request to start recording for "%s" however we do not any associated streaming protocol support',
297
- this.deviceData.description,
298
- );
278
+ async onUpdate(deviceData) {
279
+ if (typeof deviceData !== 'object' || this.controller === undefined) {
299
280
  return;
300
281
  }
301
282
 
302
- // Build our ffmpeg command string for the liveview video/audio stream
303
- let commandLine = [
304
- '-hide_banner',
305
- '-nostats',
306
- '-use_wallclock_as_timestamps 1',
307
- '-fflags +discardcorrupt',
308
- '-max_delay 500000',
309
- '-flags low_delay',
310
- '-f h264',
311
- '-i pipe:0',
312
- ];
313
-
314
- let includeAudio = false;
315
- if (
316
- this.deviceData.audio_enabled === true &&
317
- this.controller.recordingManagement.recordingManagementService.getCharacteristic(this.hap.Characteristic.RecordingAudioActive)
318
- .value === this.hap.Characteristic.RecordingAudioActive.ENABLE &&
319
- this.streamer?.codecs?.audio === 'aac' &&
320
- this.deviceData?.ffmpeg?.libfdk_aac === true
321
- ) {
322
- // Audio data only on extra pipe created in spawn command
323
- commandLine.push('-f aac -i pipe:3');
324
- includeAudio = true;
325
- }
326
-
327
- if (
328
- this.deviceData.audio_enabled === true &&
329
- this.controller.recordingManagement.recordingManagementService.getCharacteristic(this.hap.Characteristic.RecordingAudioActive)
330
- .value === this.hap.Characteristic.RecordingAudioActive.ENABLE &&
331
- this.streamer?.codecs?.audio === 'opus' &&
332
- this.deviceData?.ffmpeg?.libopus === true
333
- ) {
334
- // Audio data only on extra pipe created in spawn command
335
- commandLine.push('-i pipe:3');
336
- includeAudio = true;
283
+ if (this.deviceData.migrating === false && deviceData.migrating === true) {
284
+ // Migration happening between Nest <-> Google Home apps. We'll stop any active streams and close the current streaming object
285
+ this?.log?.warn?.('Migration between Nest <-> Google Home apps has started for "%s"', deviceData.description);
286
+ this.streamer?.stopEverything?.();
287
+ this.streamer = undefined;
337
288
  }
338
289
 
339
- // Build our video command for ffmpeg
340
- commandLine.push(
341
- '-map 0:v:0',
342
- '-codec:v libx264',
343
- '-preset veryfast',
344
- '-profile:v ' +
345
- (this.#recordingConfig.videoCodec.parameters.profile === this.hap.H264Profile.HIGH
346
- ? 'high'
347
- : this.#recordingConfig.videoCodec.parameters.profile === this.hap.H264Profile.MAIN
348
- ? 'main'
349
- : 'baseline'),
350
- '-level:v ' +
351
- (this.#recordingConfig.videoCodec.parameters.level === this.hap.H264Level.LEVEL4_0
352
- ? '4.0'
353
- : this.#recordingConfig.videoCodec.parameters.level === this.hap.H264Level.LEVEL3_2
354
- ? '3.2'
355
- : '3.1'),
356
- '-noautoscale',
357
- '-bf 0',
358
- '-filter:v fps=fps=' + this.#recordingConfig.videoCodec.resolution[2],
359
- '-g:v ' + (this.#recordingConfig.videoCodec.resolution[2] * this.#recordingConfig.videoCodec.parameters.iFrameInterval) / 1000,
360
- '-b:v ' + this.#recordingConfig.videoCodec.parameters.bitRate + 'k',
361
- '-bufsize ' + 2 * this.#recordingConfig.videoCodec.parameters.bitRate + 'k',
362
- '-fps_mode passthrough',
363
- '-reset_timestamps 1',
364
- '-video_track_timescale 90000',
365
- '-movflags frag_keyframe+empty_moov+default_base_moof',
366
- );
367
-
368
- // We have seperate video and audio streams that need to be muxed together if audio enabled
369
- if (includeAudio === true) {
370
- let audioSampleRates = ['8', '16', '24', '32', '44.1', '48'];
371
-
372
- commandLine.push(
373
- '-map 1:a:0',
374
- '-codec:a libfdk_aac',
375
- '-profile:a aac_low',
376
- '-ar ' + audioSampleRates[this.#recordingConfig.audioCodec.samplerate] + 'k',
377
- '-b:a ' + this.#recordingConfig.audioCodec.bitrate + 'k',
378
- '-ac ' + this.#recordingConfig.audioCodec.audioChannels,
379
- );
290
+ if (this.deviceData.migrating === true && deviceData.migrating === false) {
291
+ // Migration has completed between Nest <-> Google Home apps
292
+ this?.log?.success?.('Migration between Nest <-> Google Home apps has completed for "%s"', deviceData.description);
380
293
  }
381
294
 
382
- commandLine.push('-f mp4 pipe:1'); // output to stdout in mp4
383
-
384
- // Start our ffmpeg recording process and stream from our streamer
385
- // video is pipe #1
386
- // audio is pipe #3 if including audio
387
- this?.log?.debug?.(
388
- 'ffmpeg process for recording stream from "%s" will be called using the following commandline',
389
- this.deviceData.description,
390
- commandLine.join(' ').toString(),
391
- );
392
- let ffmpegRecording = child_process.spawn(this.deviceData.ffmpeg.binary, commandLine.join(' ').split(' '), {
393
- env: process.env,
394
- stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
395
- });
396
-
397
- // Process FFmpeg output and parse out the fMP4 stream it's generating for HomeKit Secure Video.
398
- let mp4FragmentData = [];
399
- let mp4boxes = [];
400
- let eventEmitter = new EventEmitter();
401
-
402
- ffmpegRecording.stdout.on('data', (data) => {
403
- // Process the mp4 data from our socket connection and convert into mp4 fragment boxes we need
404
- mp4FragmentData = mp4FragmentData.length === 0 ? data : Buffer.concat([mp4FragmentData, data]);
405
- while (mp4FragmentData.length >= 8) {
406
- let boxSize = mp4FragmentData.slice(0, 4).readUInt32BE(0); // Includes header and data size
407
-
408
- if (mp4FragmentData.length < boxSize) {
409
- // We dont have enough data in the buffer yet to process the full mp4 box
410
- // so, exit loop and await more data
411
- break;
412
- }
413
-
414
- // Add it to our queue to be pushed out through the generator function.
415
- if (Array.isArray(mp4boxes) === true && eventEmitter !== undefined) {
416
- mp4boxes.push({
417
- header: mp4FragmentData.slice(0, 8),
418
- type: mp4FragmentData.slice(4, 8).toString(),
419
- data: mp4FragmentData.slice(8, boxSize),
420
- });
421
- eventEmitter.emit(MP4BOX);
295
+ // Handle case of changes in streaming protocols OR just finished migration between Nest <-> Google Home apps
296
+ if (this.streamer === undefined && deviceData.migrating === false) {
297
+ if (JSON.stringify(deviceData.streaming_protocols) !== JSON.stringify(this.deviceData.streaming_protocols)) {
298
+ this?.log?.warn?.('Available streaming protocols have changed for "%s"', deviceData.description);
299
+ this.streamer?.stopEverything?.();
300
+ this.streamer = undefined;
301
+ }
302
+ if (deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.WEBRTC) === true && WebRTC !== undefined) {
303
+ if (this.deviceData.migrating === true && deviceData.migrating === false) {
304
+ this?.log?.debug?.('Using WebRTC streamer for "%s" after migration', deviceData.description);
422
305
  }
423
306
 
424
- // Remove the section of data we've just processed from our buffer
425
- mp4FragmentData = mp4FragmentData.slice(boxSize);
307
+ this.streamer = new WebRTC(this.uuid, deviceData, {
308
+ log: this.log,
309
+ });
426
310
  }
427
- });
428
311
 
429
- ffmpegRecording.on('exit', (code, signal) => {
430
- if (signal !== 'SIGKILL' || signal === null) {
431
- this?.log?.error?.('ffmpeg recording process for "%s" stopped unexpectedly. Exit code was "%s"', this.deviceData.description, code);
432
- }
312
+ if (deviceData.streaming_protocols.includes(STREAMING_PROTOCOL.NEXUSTALK) === true && NexusTalk !== undefined) {
313
+ if (this.deviceData.migrating === true && deviceData.migrating === false) {
314
+ this?.log?.debug?.('Using NexusTalk streamer for "%s" after migration', deviceData.description);
315
+ }
433
316
 
434
- if (this.#hkSessions?.[sessionID] !== undefined) {
435
- delete this.#hkSessions[sessionID];
317
+ this.streamer = new NexusTalk(this.uuid, deviceData, {
318
+ log: this.log,
319
+ });
436
320
  }
437
- });
438
-
439
- // eslint-disable-next-line no-unused-vars
440
- ffmpegRecording.on('error', (error) => {
441
- // Empty
442
- });
443
-
444
- // ffmpeg console output is via stderr
445
- ffmpegRecording.stderr.on('data', (data) => {
446
- if (data.toString().includes('frame=') === false && this.deviceData?.ffmpeg?.debug === true) {
447
- // Monitor ffmpeg output
448
- this?.log?.debug?.(data.toString());
321
+ if (
322
+ this?.streamer?.isBuffering() === false &&
323
+ deviceData?.hksv === true &&
324
+ this?.controller?.recordingManagement?.recordingManagementService !== undefined &&
325
+ this.controller.recordingManagement.recordingManagementService.getCharacteristic(this.hap.Characteristic.Active).value ===
326
+ this.hap.Characteristic.Active.ACTIVE
327
+ ) {
328
+ await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.START_BUFFER);
449
329
  }
450
- });
330
+ }
451
331
 
452
- // Start the appropriate streamer
453
- this.streamer !== undefined &&
454
- this.streamer.startRecordStream(sessionID, ffmpegRecording.stdin, ffmpegRecording?.stdio?.[3] ? ffmpegRecording.stdio[3] : null);
332
+ // Check to see if any activity zones were added for both non-HKSV and HKSV enabled devices
333
+ if (
334
+ Array.isArray(deviceData.activity_zones) === true &&
335
+ JSON.stringify(deviceData.activity_zones) !== JSON.stringify(this.deviceData.activity_zones)
336
+ ) {
337
+ deviceData.activity_zones.forEach((zone) => {
338
+ if (this.deviceData.hksv === false || (this.deviceData.hksv === true && this.ffmpeg instanceof FFmpeg === true && zone.id === 1)) {
339
+ if (this.motionServices?.[zone.id]?.service === undefined) {
340
+ // Zone doesn't have an associated motion sensor, so add one
341
+ let zoneName = zone.id === 1 ? '' : zone.name;
342
+ let eveOptions = zone.id === 1 ? {} : undefined; // Only link EveHome for zone 1
455
343
 
456
- // Store our ffmpeg sessions
457
- this.#hkSessions[sessionID] = {};
458
- this.#hkSessions[sessionID].eventEmitter = eventEmitter;
459
- this.#hkSessions[sessionID].ffmpeg = ffmpegRecording; // Store ffmpeg process ID
344
+ let tempService = this.addHKService(this.hap.Service.MotionSensor, zoneName, zone.id, eveOptions);
460
345
 
461
- this?.log?.info?.('Started recording from "%s" %s', this.deviceData.description, includeAudio === false ? 'without audio' : '');
346
+ this.addHKCharacteristic(tempService, this.hap.Characteristic.Active);
347
+ tempService.updateCharacteristic(this.hap.Characteristic.Name, zoneName);
348
+ tempService.updateCharacteristic(this.hap.Characteristic.MotionDetected, false); // No motion initially
462
349
 
463
- // Loop generating MOOF/MDAT box pairs for HomeKit Secure Video.
464
- // HAP-NodeJS cancels this async generator function when recording completes also
465
- let segment = [];
466
- for (;;) {
467
- if (this.#hkSessions?.[sessionID] === undefined || this.#hkSessions?.[sessionID]?.ffmpeg === undefined) {
468
- // Our session object is not present
469
- // ffmpeg recorder process is not present
470
- // so finish up the loop
471
- break;
472
- }
350
+ this.motionServices[zone.id] = { service: tempService, timer: undefined };
351
+ }
352
+ }
353
+ });
354
+ }
473
355
 
474
- if (mp4boxes?.length === 0 && eventEmitter !== undefined) {
475
- // since the ffmpeg recorder process hasn't notified us of any mp4 fragment boxes, wait until there are some
476
- await EventEmitter.once(eventEmitter, MP4BOX);
477
- }
356
+ // Check to see if any activity zones were removed for both non-HKSV and HKSV enabled devices
357
+ // We'll also update the online status of the camera in the motion service here
358
+ Object.entries(this.motionServices).forEach(([zoneID, service]) => {
359
+ // Set online status
360
+ service.service.updateCharacteristic(
361
+ this.hap.Characteristic.Active,
362
+ deviceData.online === true ? this.hap.Characteristic.Active.ACTIVE : this.hap.Characteristic.Active.INACTIVE,
363
+ );
478
364
 
479
- let mp4box = mp4boxes.shift();
480
- if (typeof mp4box !== 'object') {
481
- // Not an mp4 fragment box, so try again
482
- continue;
365
+ // Handle deleted zones (excluding zone ID 1 for HKSV)
366
+ if (
367
+ zoneID !== '1' &&
368
+ Array.isArray(deviceData.activity_zones) === true &&
369
+ deviceData.activity_zones.findIndex(({ id }) => id === Number(zoneID)) === -1
370
+ ) {
371
+ // Motion service we created doesn't appear in zone list anymore, so assume deleted
372
+ this.accessory.removeService(service.service);
373
+ delete this.motionServices[zoneID];
483
374
  }
375
+ });
484
376
 
485
- // Queue up this fragment mp4 segment
486
- segment.push(mp4box.header, mp4box.data);
377
+ if (this.operatingModeService !== undefined) {
378
+ // Update camera off/on status
379
+ this.operatingModeService.updateCharacteristic(
380
+ this.hap.Characteristic.ManuallyDisabled,
381
+ deviceData.streaming_enabled === false
382
+ ? this.hap.Characteristic.ManuallyDisabled.DISABLED
383
+ : this.hap.Characteristic.ManuallyDisabled.ENABLED,
384
+ );
487
385
 
488
- if (mp4box.type === 'moov' || mp4box.type === 'mdat') {
489
- yield { data: Buffer.concat(segment), isLast: false };
490
- segment = [];
386
+ if (deviceData?.has_statusled === true) {
387
+ // Set camera recording indicator. This cannot be turned off on Nest Cameras/Doorbells
388
+ // 0 = auto
389
+ // 1 = low
390
+ // 2 = high
391
+ this.operatingModeService.updateCharacteristic(
392
+ this.hap.Characteristic.CameraOperatingModeIndicator,
393
+ deviceData.statusled_brightness !== 1,
394
+ );
491
395
  }
492
- }
493
- }
494
-
495
- closeRecordingStream(sessionID, closeReason) {
496
- // Stop the associated recording stream
497
- this.streamer !== undefined && this.streamer.stopRecordStream(sessionID);
498
396
 
499
- if (typeof this.#hkSessions?.[sessionID] === 'object') {
500
- if (this.#hkSessions[sessionID]?.ffmpeg !== undefined) {
501
- // Kill the ffmpeg recorder process
502
- this.#hkSessions[sessionID].ffmpeg.kill('SIGKILL');
397
+ if (deviceData?.has_irled === true) {
398
+ // Set nightvision status in HomeKit
399
+ this.operatingModeService.updateCharacteristic(this.hap.Characteristic.NightVision, deviceData.irled_enabled);
503
400
  }
504
- if (this.#hkSessions[sessionID]?.eventEmitter !== undefined) {
505
- this.#hkSessions[sessionID].eventEmitter.emit(MP4BOX); // This will ensure we cleanly exit out from our segment generator
506
- this.#hkSessions[sessionID].eventEmitter.removeAllListeners(MP4BOX); // Tidy up our event listeners
401
+
402
+ if (deviceData?.has_video_flip === true) {
403
+ // Update image flip status
404
+ this.operatingModeService.updateCharacteristic(this.hap.Characteristic.ImageRotation, deviceData.video_flipped === true ? 180 : 0);
507
405
  }
508
- delete this.#hkSessions[sessionID];
509
406
  }
510
407
 
511
- // Log recording finished messages depending on reason
512
- if (closeReason === this.hap.HDSProtocolSpecificErrorReason.NORMAL) {
513
- this?.log?.info?.('Completed recording from "%s"', this.deviceData.description);
514
- } else {
515
- this?.log?.warn?.(
516
- 'Recording from "%s" completed with error. Reason was "%s"',
517
- this.deviceData.description,
518
- this.hap.HDSProtocolSpecificErrorReason[closeReason],
408
+ if (deviceData.hksv === true && this.controller?.recordingManagement?.recordingManagementService !== undefined) {
409
+ // Update recording audio status
410
+ this.controller.recordingManagement.recordingManagementService.updateCharacteristic(
411
+ this.hap.Characteristic.RecordingAudioActive,
412
+ deviceData.audio_enabled === true
413
+ ? this.hap.Characteristic.RecordingAudioActive.ENABLE
414
+ : this.hap.Characteristic.RecordingAudioActive.DISABLE,
519
415
  );
520
416
  }
521
- }
522
417
 
523
- updateRecordingActive(enableRecording) {
524
- if (enableRecording === true && this.streamer?.isBuffering() === false) {
525
- // Start a buffering stream for this camera/doorbell. Ensures motion captures all video on motion trigger
526
- // Required due to data delays by on prem Nest to cloud to HomeKit accessory to iCloud etc
527
- // Make sure have appropriate bandwidth!!!
528
- this?.log?.info?.('Recording was turned on for "%s"', this.deviceData.description);
529
- this.streamer.startBuffering();
530
- }
418
+ if (this.controller?.microphoneService !== undefined) {
419
+ // Update microphone volume if specified
420
+ //this.controller.microphoneService.updateCharacteristic(this.hap.Characteristic.Volume, deviceData.xxx);
531
421
 
532
- if (enableRecording === false && this.streamer?.isBuffering() === true) {
533
- this.streamer.stopBuffering();
534
- this?.log?.warn?.('Recording was turned off for "%s"', this.deviceData.description);
422
+ // if audio is disabled, we'll mute microphone
423
+ this.controller.setMicrophoneMuted(deviceData.audio_enabled === false ? true : false);
535
424
  }
536
- }
425
+ if (this.controller?.speakerService !== undefined) {
426
+ // Update speaker volume if specified
427
+ //this.controller.speakerService.updateCharacteristic(this.hap.Characteristic.Volume, deviceData.xxx);
537
428
 
538
- updateRecordingConfiguration(recordingConfig) {
539
- this.#recordingConfig = recordingConfig; // Store the recording configuration HKSV has provided
540
- }
429
+ // if audio is disabled, we'll mute speaker
430
+ this.controller.setSpeakerMuted(deviceData.audio_enabled === false ? true : false);
431
+ }
541
432
 
542
- async handleSnapshotRequest(snapshotRequestDetails, callback) {
543
- // snapshotRequestDetails.reason === ResourceRequestReason.PERIODIC
544
- // snapshotRequestDetails.reason === ResourceRequestReason.EVENT
433
+ // Process alerts, the most recent alert is first
434
+ // For HKSV, we're interested in motion events
435
+ // For non-HKSV, we're interested in motion, face and person events (maybe sound and package later)
436
+ deviceData.alerts.forEach((event) => {
437
+ if (
438
+ this.operatingModeService === undefined ||
439
+ (this.operatingModeService !== undefined &&
440
+ this.operatingModeService.getCharacteristic(this.hap.Characteristic.HomeKitCameraActive).value ===
441
+ this.hap.Characteristic.HomeKitCameraActive.ON)
442
+ ) {
443
+ // We're configured to handle camera events
444
+ // https://github.com/Supereg/secure-video-specification?tab=readme-ov-file#33-homekitcameraactive
545
445
 
546
- // Get current image from camera/doorbell
547
- let imageBuffer = undefined;
446
+ // Handle motion event
447
+ // For a HKSV enabled camera, we will use this to trigger the starting of the HKSV recording if the camera is active
448
+ if (event.types.includes('motion') === true) {
449
+ if (
450
+ this.motionTimer === undefined &&
451
+ (this.deviceData.hksv === false || this.ffmpeg instanceof FFmpeg === false || this.streamer === undefined)
452
+ ) {
453
+ this?.log?.info?.('Motion detected at "%s"', deviceData.description);
454
+ }
548
455
 
549
- if (this.deviceData.migrating === false && this.deviceData.streaming_enabled === true && this.deviceData.online === true) {
550
- let response = await this.get({ uuid: this.deviceData.nest_google_uuid, camera_snapshot: '' });
551
- if (Buffer.isBuffer(response?.camera_snapshot) === true) {
552
- imageBuffer = response.camera_snapshot;
553
- this.lastSnapshotImage = response.camera_snapshot;
456
+ event.zone_ids.forEach((zoneID) => {
457
+ if (
458
+ typeof this.motionServices?.[zoneID]?.service === 'object' &&
459
+ this.motionServices[zoneID].service.getCharacteristic(this.hap.Characteristic.MotionDetected).value !== true
460
+ ) {
461
+ // Trigger motion for matching zone if not already active
462
+ this.motionServices[zoneID].service.updateCharacteristic(this.hap.Characteristic.MotionDetected, true);
554
463
 
555
- // Keep this snapshot image cached for a certain period
556
- clearTimeout(this.snapshotTimer);
557
- this.snapshotTimer = setTimeout(() => {
558
- this.lastSnapshotImage = undefined;
559
- }, SNAPSHOTCACHETIMEOUT);
560
- }
561
- }
464
+ // Log motion started into history
465
+ this.history(this.motionServices[zoneID].service, {
466
+ status: 1,
467
+ });
468
+ }
469
+ });
470
+
471
+ // Clear any motion active timer so we can extend if more motion detected
472
+ clearTimeout(this.motionTimer);
473
+ this.motionTimer = setTimeout(() => {
474
+ event.zone_ids.forEach((zoneID) => {
475
+ if (typeof this.motionServices?.[zoneID]?.service === 'object') {
476
+ // Mark associated motion services as motion not detected
477
+ this.motionServices[zoneID].service.updateCharacteristic(this.hap.Characteristic.MotionDetected, false);
478
+
479
+ // Log motion stopped into history
480
+ this.history(this.motionServices[zoneID].service, { status: 0 });
481
+ }
482
+ });
483
+
484
+ this.motionTimer = undefined; // No motion timer active
485
+ }, this.deviceData.motionCooldown * 1000);
486
+ }
487
+
488
+ // Handle person/face event
489
+ // We also treat a 'face' event the same as a person event ie: if you have a face, you have a person
490
+ if (event.types.includes('person') === true || event.types.includes('face') === true) {
491
+ if (this.personTimer === undefined) {
492
+ // We don't have a person cooldown timer running, so we can process the 'person'/'face' event
493
+ if (this.deviceData.hksv === false || this.ffmpeg instanceof FFmpeg === false || this.streamer === undefined) {
494
+ // We'll only log a person detected event if HKSV is disabled
495
+ this?.log?.info?.('Person detected at "%s"', deviceData.description);
496
+ }
497
+
498
+ // Cooldown for person being detected
499
+ // Start this before we process further
500
+ this.personTimer = setTimeout(() => {
501
+ this.personTimer = undefined; // No person timer active
502
+ }, this.deviceData.personCooldown * 1000);
503
+
504
+ if (event.types.includes('motion') === false) {
505
+ // If the person/face event doesn't include a motion event, add it in here
506
+ // This will handle all the motion triggering stuff
507
+ event.types.push('motion');
508
+ }
509
+ }
510
+ }
511
+ }
512
+ });
513
+ }
514
+
515
+ onShutdown() {
516
+ // Stop all streamer logic (buffering, output, etc)
517
+ this.streamer?.stopEverything?.();
518
+
519
+ // Terminate any remaining ffmpeg sessions for this camera/doorbell
520
+ this.ffmpeg?.killAllSessions?.(this.uuid);
521
+
522
+ // Stop any on-going HomeKit sessions, either live or recording
523
+ // We'll terminate any ffmpeg, rtpSplitter etc processes
524
+ this.#liveSessions?.forEach?.((session) => {
525
+ session?.rtpSplitter?.close?.();
526
+ });
527
+ }
528
+
529
+ // Taken and adapted from:
530
+ // https://github.com/hjdhjd/homebridge-unifi-protect/blob/eee6a4e379272b659baa6c19986d51f5bf2cbbbc/src/protect-ffmpeg-record.ts
531
+ async *handleRecordingStreamRequest(sessionID) {
532
+ if (this.ffmpeg instanceof FFmpeg === false) {
533
+ // No valid ffmpeg binary present, so cannot do recording!!
534
+ this?.log?.warn?.(
535
+ 'Received request to start recording for "%s" however we do not have an ffmpeg binary present',
536
+ this.deviceData.description,
537
+ );
538
+ return;
539
+ }
540
+
541
+ if (this.streamer === undefined) {
542
+ this?.log?.error?.(
543
+ 'Received request to start recording for "%s" however we do not have any associated streaming protocol support',
544
+ this.deviceData.description,
545
+ );
546
+ return;
547
+ }
548
+
549
+ if (
550
+ this.motionServices?.[1]?.service !== undefined &&
551
+ this.motionServices[1].service.getCharacteristic(this.hap.Characteristic.MotionDetected).value === false
552
+ ) {
553
+ // Should only be recording if motion detected.
554
+ // Sometimes when starting up, HAP-nodeJS or HomeKit triggers this even when motion isn't occurring
555
+ this?.log?.debug?.(
556
+ 'Received request to commence recording for "%s" however we have not detected any motion',
557
+ this.deviceData.description,
558
+ );
559
+ return;
560
+ }
561
+
562
+ let includeAudio =
563
+ this.deviceData.audio_enabled === true &&
564
+ this.controller?.recordingManagement?.recordingManagementService?.getCharacteristic(this.hap.Characteristic.RecordingAudioActive)
565
+ ?.value === this.hap.Characteristic.RecordingAudioActive.ENABLE;
566
+
567
+ let commandLine = [
568
+ '-hide_banner',
569
+ '-nostats',
570
+ '-fflags',
571
+ '+discardcorrupt+genpts',
572
+ '-avoid_negative_ts',
573
+ 'make_zero',
574
+ '-max_delay',
575
+ '500000',
576
+ '-flags',
577
+ 'low_delay',
578
+
579
+ // Video input
580
+ '-f',
581
+ 'h264',
582
+ '-i',
583
+ 'pipe:0',
584
+
585
+ // Audio input (optional)
586
+ ...(includeAudio === true
587
+ ? this.streamer.codecs.audio === Streamer.CODEC_TYPE.PCM
588
+ ? ['-thread_queue_size', '512', '-f', 's16le', '-ar', '48000', '-ac', '2', '-i', 'pipe:3']
589
+ : this.streamer.codecs.audio === Streamer.CODEC_TYPE.AAC
590
+ ? ['-thread_queue_size', '512', '-f', 'aac', '-i', 'pipe:3']
591
+ : []
592
+ : []),
593
+
594
+ // Video output including hardware acceleration if available
595
+ '-map',
596
+ '0:v:0',
597
+ '-codec:v',
598
+ this.deviceData?.ffmpeg?.hwaccel === true && this.ffmpeg?.hardwareH264Codec !== undefined ? this.ffmpeg.hardwareH264Codec : 'libx264',
599
+ ...(this.deviceData?.ffmpeg?.hwaccel !== true || ['h264_nvenc', 'h264_qsv'].includes(this.ffmpeg?.hardwareH264Codec || '') === true
600
+ ? [
601
+ '-preset',
602
+ 'veryfast',
603
+ '-profile:v',
604
+ this.#recordingConfig.videoCodec.parameters.profile === this.hap.H264Profile.HIGH
605
+ ? 'high'
606
+ : this.#recordingConfig.videoCodec.parameters.profile === this.hap.H264Profile.MAIN
607
+ ? 'main'
608
+ : 'baseline',
609
+ '-level:v',
610
+ this.#recordingConfig.videoCodec.parameters.level === this.hap.H264Level.LEVEL4_0
611
+ ? '4.0'
612
+ : this.#recordingConfig.videoCodec.parameters.level === this.hap.H264Level.LEVEL3_2
613
+ ? '3.2'
614
+ : '3.1',
615
+ '-bf',
616
+ '0',
617
+ ]
618
+ : []),
619
+
620
+ '-filter:v',
621
+ 'fps=fps=' + this.#recordingConfig.videoCodec.resolution[2] + ',format=yuv420p',
622
+ '-fps_mode',
623
+ 'cfr',
624
+ '-g:v',
625
+ Math.round(
626
+ (this.#recordingConfig.videoCodec.resolution[2] * this.#recordingConfig.videoCodec.parameters.iFrameInterval) / 1000,
627
+ ).toString(),
628
+ '-b:v',
629
+ this.#recordingConfig.videoCodec.parameters.bitRate + 'k',
630
+ '-bufsize',
631
+ 2 * this.#recordingConfig.videoCodec.parameters.bitRate + 'k',
632
+ '-video_track_timescale',
633
+ '90000',
634
+ '-movflags',
635
+ 'frag_keyframe+empty_moov+default_base_moof',
636
+
637
+ // Audio output
638
+ ...(includeAudio === true
639
+ ? ['-map', '1:a:0', '-codec:a', 'libfdk_aac', '-profile:a', 'aac_low', '-ar', '16000', '-b:a', '16k', '-ac', '1']
640
+ : []),
641
+
642
+ '-f',
643
+ 'mp4',
644
+ 'pipe:1',
645
+ ];
646
+
647
+ // Start our ffmpeg recording process and stream from our streamer
648
+ // video is pipe #1
649
+ // audio is pipe #3 if including audio
650
+ this?.log?.debug?.(
651
+ 'ffmpeg process for recording stream from "%s" will be called using the following commandline',
652
+ this.deviceData.description,
653
+ commandLine.join(' ').toString(),
654
+ );
655
+
656
+ let ffmpegStream = this.ffmpeg.createSession(
657
+ this.uuid,
658
+ sessionID,
659
+ commandLine,
660
+ 'record',
661
+ this.deviceData.ffmpeg.debug === true
662
+ ? (data) => {
663
+ if (data.toString().includes('frame=') === false) {
664
+ this?.log?.debug?.(data.toString());
665
+ }
666
+ }
667
+ : undefined,
668
+ 4, // 4 pipes required
669
+ );
670
+
671
+ if (ffmpegStream === undefined) {
672
+ return;
673
+ }
674
+
675
+ let buffer = Buffer.alloc(0);
676
+ let mp4boxes = [];
677
+
678
+ ffmpegStream?.stdout?.on?.('data', (chunk) => {
679
+ buffer = Buffer.concat([buffer, chunk]);
680
+
681
+ while (buffer.length >= 8) {
682
+ let boxSize = buffer.readUInt32BE(0);
683
+ if (boxSize < 8 || buffer.length < boxSize) {
684
+ // We don't have enough data in the buffer yet to process the full mp4 box
685
+ // so, exit loop and await more data
686
+ break;
687
+ }
688
+
689
+ let boxType = buffer.subarray(4, 8).toString();
690
+
691
+ // Add it to our queue to be pushed out through the generator function.
692
+ mp4boxes.push({
693
+ header: buffer.subarray(0, 8),
694
+ type: boxType,
695
+ data: buffer.subarray(8, boxSize),
696
+ });
697
+
698
+ buffer = buffer.subarray(boxSize);
699
+ this.emit(MP4BOX);
700
+ }
701
+ });
702
+
703
+ ffmpegStream?.on?.('exit', (code, signal) => {
704
+ if (signal !== 'SIGKILL' || signal === null) {
705
+ this?.log?.error?.('ffmpeg recording process for "%s" stopped unexpectedly. Exit code was "%s"', this.deviceData.description, code);
706
+ }
707
+
708
+ // Ensure generator wakes up and exits
709
+ this.emit(MP4BOX);
710
+ this.removeAllListeners(MP4BOX);
711
+ });
712
+
713
+ // Start the appropriate streamer
714
+ let { video, audio } = await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.START_RECORD, {
715
+ sessionID: sessionID,
716
+ });
717
+
718
+ // Connect the ffmpeg process to the streamer input/output
719
+ video?.pipe?.(ffmpegStream?.stdin); // Streamer video → ffmpeg stdin (pipe:0)
720
+ audio?.pipe?.(ffmpegStream?.stdio?.[3]); // Streamer audio → ffmpeg pipe:3
721
+
722
+ this?.log?.info?.('Started recording from "%s" %s', this.deviceData.description, includeAudio === false ? 'without audio' : '');
723
+
724
+ // Loop generating MOOF/MDAT box pairs for HomeKit Secure Video.
725
+ // HAP-NodeJS cancels this async generator function when recording completes also
726
+ let segment = [];
727
+ for (;;) {
728
+ if (this.ffmpeg?.hasSession?.(this.uuid, sessionID, 'record') === false) {
729
+ break;
730
+ }
731
+
732
+ if (mp4boxes.length === 0) {
733
+ await EventEmitter.once(this, MP4BOX);
734
+
735
+ if (this.ffmpeg?.hasSession?.(this.uuid, sessionID, 'record') === false) {
736
+ break;
737
+ }
738
+
739
+ if (mp4boxes.length === 0) {
740
+ continue;
741
+ }
742
+ }
743
+
744
+ let box = mp4boxes.shift();
745
+ if (box === undefined) {
746
+ continue;
747
+ }
748
+
749
+ segment.push(box.header, box.data);
750
+
751
+ if (box.type === 'moov' || box.type === 'mdat') {
752
+ yield { data: Buffer.concat(segment), isLast: false };
753
+ segment = [];
754
+ }
755
+ }
756
+ }
757
+
758
+ async closeRecordingStream(sessionID, closeReason) {
759
+ // Stop recording stream from the streamer
760
+ await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.STOP_RECORD, {
761
+ sessionID: sessionID,
762
+ });
763
+
764
+ // Terminate the ffmpeg recording process
765
+ this.ffmpeg?.killSession?.(this.uuid, sessionID, 'record', 'SIGKILL');
766
+
767
+ // Wake and clear HomeKit Secure Video generator
768
+ this.emit(MP4BOX);
769
+ this.removeAllListeners(MP4BOX);
770
+
771
+ // Log completion depending on reason
772
+ if (closeReason === this.hap.HDSProtocolSpecificErrorReason.NORMAL) {
773
+ this?.log?.info?.('Completed recording from "%s"', this.deviceData.description);
774
+ } else {
775
+ this?.log?.warn?.(
776
+ 'Recording from "%s" completed with error. Reason was "%s"',
777
+ this.deviceData.description,
778
+ this.hap.HDSProtocolSpecificErrorReason?.[closeReason] || 'code ' + closeReason,
779
+ );
780
+ }
781
+ }
782
+
783
+ async updateRecordingActive(enableRecording) {
784
+ if (this.streamer === undefined) {
785
+ return;
786
+ }
787
+
788
+ if (enableRecording === true && this.streamer.isBuffering() === false) {
789
+ // Start a buffering stream for this camera/doorbell. Ensures motion captures all video on motion trigger
790
+ // Required due to data delays by on prem Nest to cloud to HomeKit accessory to iCloud etc
791
+ // Make sure have appropriate bandwidth!!!
792
+ this?.log?.info?.('Recording was turned on for "%s"', this.deviceData.description);
793
+ await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.START_BUFFER);
794
+ }
795
+
796
+ if (enableRecording === false && this.streamer.isBuffering() === true) {
797
+ // Stop buffering stream for this camera/doorbell
798
+ await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.STOP_BUFFER);
799
+ this?.log?.warn?.('Recording was turned off for "%s"', this.deviceData.description);
800
+ }
801
+ }
802
+
803
+ updateRecordingConfiguration(recordingConfig) {
804
+ this.#recordingConfig = recordingConfig; // Store the recording configuration HKSV has provided
805
+ }
806
+
807
+ async handleSnapshotRequest(snapshotRequestDetails, callback) {
808
+ // snapshotRequestDetails.reason === ResourceRequestReason.PERIODIC
809
+ // snapshotRequestDetails.reason === ResourceRequestReason.EVENT
810
+
811
+ // eslint-disable-next-line no-unused-vars
812
+ const isLikelyBlackImage = (buffer) => {
813
+ // TODO <- Placeholder for actual black image detection logic
814
+ return false;
815
+ };
816
+
817
+ // Get current image from camera/doorbell
818
+ let imageBuffer = undefined;
819
+
820
+ if (this.deviceData.migrating === false && this.deviceData.streaming_enabled === true && this.deviceData.online === true) {
821
+ // Call the camera/doorbell to get a snapshot image.
822
+ // Prefer onGet() result if implemented; fallback to static handler
823
+ let response = await this.get({ uuid: this.deviceData.nest_google_uuid, camera_snapshot: Buffer.alloc(0) });
824
+ if (
825
+ Buffer.isBuffer(response?.camera_snapshot) === true &&
826
+ response.camera_snapshot.length > 0 &&
827
+ isLikelyBlackImage(response.camera_snapshot) === false
828
+ ) {
829
+ imageBuffer = response.camera_snapshot;
830
+ this.#lastSnapshotImage = response.camera_snapshot;
831
+
832
+ // Keep this snapshot image cached for a certain period
833
+ clearTimeout(this.#snapshotTimer);
834
+ this.#snapshotTimer = setTimeout(() => {
835
+ this.#lastSnapshotImage = undefined;
836
+ }, TIMERS.SNAPSHOT);
837
+ }
838
+ }
562
839
 
563
840
  if (
564
841
  this.deviceData.migrating === false &&
565
842
  this.deviceData.streaming_enabled === false &&
566
843
  this.deviceData.online === true &&
567
- this.#cameraVideoOffImage !== undefined
844
+ this.#cameraImages?.off !== undefined
568
845
  ) {
569
846
  // Return 'camera switched off' jpg to image buffer
570
- imageBuffer = this.#cameraVideoOffImage;
847
+ imageBuffer = this.#cameraImages.off;
571
848
  }
572
849
 
573
- if (this.deviceData.migrating === false && this.deviceData.online === false && this.#cameraOfflineImage !== undefined) {
850
+ if (this.deviceData.migrating === false && this.deviceData.online === false && this.#cameraImages?.offline !== undefined) {
574
851
  // Return 'camera offline' jpg to image buffer
575
- imageBuffer = this.#cameraOfflineImage;
852
+ imageBuffer = this.#cameraImages.offline;
576
853
  }
577
854
 
578
- if (this.deviceData.migrating === true && this.#cameraTransferringImage !== undefined) {
855
+ if (this.deviceData.migrating === true && this.#cameraImages?.transfer !== undefined) {
579
856
  // Return 'camera transferring' jpg to image buffer
580
- imageBuffer = this.#cameraTransferringImage;
857
+ imageBuffer = this.#cameraImages.transfer;
581
858
  }
582
859
 
583
860
  if (imageBuffer === undefined) {
584
861
  // If we get here, we have no snapshot image
585
862
  // We'll use the last successful snapshot as long as it's within a certain time period
586
- imageBuffer = this.lastSnapshotImage;
863
+ imageBuffer = this.#lastSnapshotImage;
587
864
  }
588
865
 
589
866
  callback(imageBuffer?.length === 0 ? 'Unable to obtain Camera/Doorbell snapshot' : null, imageBuffer);
590
867
  }
591
868
 
592
869
  async prepareStream(request, callback) {
593
- const getPort = async (options) => {
870
+ // HomeKit has asked us to prepare ports and encryption details for video/audio streaming
871
+
872
+ const getPort = async () => {
594
873
  return new Promise((resolve, reject) => {
595
- let server = net.createServer();
596
- server.unref();
597
- server.on('error', reject);
598
- server.listen(options, () => {
874
+ let server = dgram.createSocket('udp4');
875
+ server.bind({ port: 0, exclusive: true }, () => {
599
876
  let port = server.address().port;
600
- server.close(() => {
601
- resolve(port); // return port
602
- });
877
+ server.close(() => resolve(port));
603
878
  });
879
+ server.on('error', reject);
604
880
  });
605
881
  };
606
882
 
@@ -616,21 +892,15 @@ export default class NestCamera extends HomeKitDevice {
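prepareStream() above now reserves local RTP/RTCP ports by binding a UDP socket to port 0 and letting the OS choose, instead of the old net.createServer() probe; the hunk below wires those ports, SSRCs and SRTP keys into the response HomeKit expects. A standalone sketch of that port helper (the name is illustrative; the dgram usage mirrors the code above):

```js
import dgram from 'node:dgram';

// Sketch: ask the OS for a currently free UDP port by binding to port 0, then release it.
// The port is only *likely* to still be free when it is reused moments later.
async function getEphemeralUdpPort() {
  return new Promise((resolve, reject) => {
    let socket = dgram.createSocket('udp4');
    socket.on('error', reject);
    socket.bind({ port: 0, exclusive: true }, () => {
      let port = socket.address().port;
      socket.close(() => resolve(port));
    });
  });
}
```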
616
892
  audioPort: request.audio.port,
617
893
  localAudioPort: await getPort(),
618
894
  audioTalkbackPort: await getPort(),
619
- rptSplitterPort: await getPort(),
895
+ rtpSplitterPort: await getPort(),
620
896
  audioCryptoSuite: request.video.srtpCryptoSuite,
621
897
  audioSRTP: Buffer.concat([request.audio.srtp_key, request.audio.srtp_salt]),
622
898
  audioSSRC: this.hap.CameraController.generateSynchronisationSource(),
623
899
 
624
- rtpSplitter: null,
625
- ffmpeg: [], // Array of ffmpeg processes we create for streaming video/audio and audio talkback
626
- video: null,
627
- audio: null,
900
+ rtpSplitter: null, // setup later during stream start
628
901
  };
629
902
 
630
- // Build response back to HomeKit with the details filled out
631
-
632
- // Dropped ip module by using small snippet of code below
633
- // Converts ipv4 mapped into ipv6 address into pure ipv4
903
+ // Converts ipv4-mapped ipv6 into pure ipv4
634
904
  if (request.addressVersion === 'ipv4' && request.sourceAddress.startsWith('::ffff:') === true) {
635
905
  request.sourceAddress = request.sourceAddress.replace('::ffff:', '');
636
906
  }
@@ -644,272 +914,251 @@ export default class NestCamera extends HomeKitDevice {
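The hunk below rebuilds the live-view ffmpeg invocation as an argv array (one token per element rather than a space-joined string) and keeps sending encrypted RTP back to HomeKit over SRTP. A hedged sketch of how the video output arguments come together (the helper name and parameter object are illustrative; the suite name, base64 key+salt and srtp:// URL shape follow the code below):

```js
// Sketch: assemble the SRTP/RTP output arguments for the video stream.
// srtpKeySalt is the SRTP master key and salt HomeKit supplied, concatenated into one Buffer.
function buildVideoRtpOutputArgs({ payloadType, ssrc, cryptoSuiteName, srtpKeySalt, address, port, mtu }) {
  return [
    '-payload_type', payloadType.toString(),
    '-ssrc', ssrc.toString(),
    '-f', 'rtp',
    '-srtp_out_suite', cryptoSuiteName, // e.g. 'AES_CM_128_HMAC_SHA1_80'
    '-srtp_out_params', srtpKeySalt.toString('base64'),
    'srtp://' + address + ':' + port + '?rtcpport=' + port + '&pkt_size=' + mtu,
  ];
}
```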
644
914
  srtp_salt: request.video.srtp_salt,
645
915
  },
646
916
  audio: {
647
- port: sessionInfo.rptSplitterPort,
917
+ port: sessionInfo.rtpSplitterPort,
648
918
  ssrc: sessionInfo.audioSSRC,
649
919
  srtp_key: request.audio.srtp_key,
650
920
  srtp_salt: request.audio.srtp_salt,
651
921
  },
652
922
  };
653
- this.#hkSessions[request.sessionID] = sessionInfo; // Store the session information
923
+
924
+ this.#liveSessions.set(request.sessionID, sessionInfo); // Store the session information
654
925
  callback(undefined, response);
655
926
  }
656
927
 
657
928
  async handleStreamRequest(request, callback) {
658
929
  // called when HomeKit asks to start/stop/reconfigure a camera/doorbell live stream
659
- if (request.type === this.hap.StreamRequestTypes.START && this.streamer === undefined) {
660
- // We have no streamer object configured, so cannot do live streams!!
661
- this?.log?.error?.(
662
- 'Received request to start live video for "%s" however we do not any associated streaming protocol support',
663
- this.deviceData.description,
664
- );
665
- }
666
930
 
667
- if (request.type === this.hap.StreamRequestTypes.START && this.deviceData?.ffmpeg?.binary === undefined) {
668
- // No ffmpeg binary present, so cannot do live streams!!
669
- this?.log?.warn?.(
670
- 'Received request to start live video for "%s" however we do not have an ffmpeg binary present',
671
- this.deviceData.description,
672
- );
673
- }
931
+ if (request.type === this.hap.StreamRequestTypes.START) {
932
+ if (this.streamer === undefined) {
933
+ // We have no streamer object configured, so cannot do live streams!!
934
+ this?.log?.error?.(
935
+ 'Received request to start live video for "%s" however we do not have any associated streaming protocol support',
936
+ this.deviceData.description,
937
+ );
938
+ return callback?.();
939
+ }
940
+
941
+ if (this.ffmpeg instanceof FFmpeg === false) {
942
+ // No valid ffmpeg binary present, so cannot do live streams!!
943
+ this?.log?.warn?.(
944
+ 'Received request to start live video for "%s" however we do not have a valid ffmpeg binary',
945
+ this.deviceData.description,
946
+ );
947
+ return callback?.();
948
+ }
949
+
950
+ let session = this.#liveSessions.get(request.sessionID);
951
+ let includeAudio = this.deviceData.audio_enabled === true && this.streamer?.codecs?.audio !== undefined;
674
952
 
675
- if (
676
- request.type === this.hap.StreamRequestTypes.START &&
677
- this.streamer !== undefined &&
678
- this.deviceData?.ffmpeg?.binary !== undefined
679
- ) {
680
953
  // Build our ffmpeg command string for the liveview video/audio stream
681
954
  let commandLine = [
682
955
  '-hide_banner',
683
956
  '-nostats',
684
- '-use_wallclock_as_timestamps 1',
685
- '-fflags +discardcorrupt',
686
- '-max_delay 500000',
687
- '-flags low_delay',
688
- '-f h264',
689
- '-i pipe:0',
957
+ '-use_wallclock_as_timestamps',
958
+ '1',
959
+ '-fflags',
960
+ '+discardcorrupt',
961
+ '-max_delay',
962
+ '500000',
963
+ '-flags',
964
+ 'low_delay',
965
+
966
+ // Video input
967
+ '-f',
968
+ 'h264',
969
+ '-i',
970
+ 'pipe:0',
971
+
972
+ // Audio input (if enabled)
973
+ ...(includeAudio === true
974
+ ? this.streamer.codecs.audio === Streamer.CODEC_TYPE.PCM
975
+ ? ['-thread_queue_size', '512', '-f', 's16le', '-ar', '48000', '-ac', '2', '-i', 'pipe:3']
976
+ : this.streamer.codecs.audio === Streamer.CODEC_TYPE.AAC
977
+ ? ['-thread_queue_size', '512', '-f', 'aac', '-i', 'pipe:3']
978
+ : []
979
+ : []),
980
+
981
+ // Video output
982
+ '-map',
983
+ '0:v:0',
984
+ '-codec:v',
985
+ 'copy',
986
+ // Below is commented out as we don't use hardware acceleration for live streaming
987
+ // ...(this.deviceData.ffmpeg.hwaccel === true && this.ffmpeg.hardwareH264Codec !== undefined
988
+ // ? ['-codec:v', this.ffmpeg.hardwareH264Codec]
989
+ // : ['-codec:v', 'copy']),
990
+ '-fps_mode',
991
+ 'passthrough',
992
+ '-reset_timestamps',
993
+ '1',
994
+ '-video_track_timescale',
995
+ '90000',
996
+ '-payload_type',
997
+ request.video.pt,
998
+ '-ssrc',
999
+ session.videoSSRC,
1000
+ '-f',
1001
+ 'rtp',
1002
+ '-srtp_out_suite',
1003
+ this.hap.SRTPCryptoSuites[session.videoCryptoSuite],
1004
+ '-srtp_out_params',
1005
+ session.videoSRTP.toString('base64'),
1006
+ 'srtp://' + session.address + ':' + session.videoPort + '?rtcpport=' + session.videoPort + '&pkt_size=' + request.video.mtu,
1007
+
1008
+ // Audio output (if enabled)
1009
+ ...(includeAudio === true
1010
+ ? request.audio.codec === this.hap.AudioStreamingCodecType.AAC_ELD
1011
+ ? ['-map', '1:a:0', '-codec:a', 'libfdk_aac', '-profile:a', 'aac_eld']
1012
+ : request.audio.codec === this.hap.AudioStreamingCodecType.OPUS
1013
+ ? [
1014
+ '-map',
1015
+ '1:a:0',
1016
+ '-codec:a',
1017
+ 'libopus',
1018
+ '-application',
1019
+ 'lowdelay',
1020
+ '-frame_duration',
1021
+ request.audio.packet_time.toString(),
1022
+ ]
1023
+ : []
1024
+ : []),
1025
+
1026
+ // Shared audio output params
1027
+ ...(includeAudio === true
1028
+ ? [
1029
+ '-flags',
1030
+ '+global_header',
1031
+ '-ar',
1032
+ request.audio.sample_rate.toString() + 'k',
1033
+ '-b:a',
1034
+ request.audio.max_bit_rate + 'k',
1035
+ '-ac',
1036
+ request.audio.channel.toString(),
1037
+ '-payload_type',
1038
+ request.audio.pt,
1039
+ '-ssrc',
1040
+ session.audioSSRC,
1041
+ '-f',
1042
+ 'rtp',
1043
+ '-srtp_out_suite',
1044
+ this.hap.SRTPCryptoSuites[session.audioCryptoSuite],
1045
+ '-srtp_out_params',
1046
+ session.audioSRTP.toString('base64'),
1047
+ 'srtp://' +
1048
+ session.address +
1049
+ ':' +
1050
+ session.audioPort +
1051
+ '?rtcpport=' +
1052
+ session.audioPort +
1053
+ '&localrtcpport=' +
1054
+ session.localAudioPort +
1055
+ '&pkt_size=188',
1056
+ ]
1057
+ : []),
690
1058
  ];
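
The command line above is now assembled one token per array element (each flag and value separated) rather than as space-joined strings. That matters because Node's child_process.spawn() passes argv entries through verbatim with no shell word-splitting, so a combined token such as '-f h264' would reach ffmpeg as a single, invalid argument. A minimal sketch of the difference, assuming an ffmpeg binary on the PATH (in this package the actual spawning is handled by the FFmpeg helper class):

import { spawn } from 'node:child_process';

// Hypothetical argv list: one flag or value per element, matching the style of the commandLine array above
let args = ['-hide_banner', '-nostats', '-f', 'h264', '-i', 'pipe:0', '-f', 'null', '-'];

// spawn() performs no shell word-splitting, so a combined element like '-f h264' would be rejected by ffmpeg
let ffmpegTest = spawn('ffmpeg', args, { stdio: ['pipe', 'ignore', 'pipe'] });
ffmpegTest.stderr.on('data', (data) => console.log(data.toString()));
ffmpegTest.stdin.end(); // No video data in this sketch; ffmpeg will simply report end-of-file and exit
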
691
1059
 
692
- let includeAudio = false;
693
- if (
694
- this.deviceData.audio_enabled === true &&
695
- this.streamer?.codecs?.audio === 'aac' &&
696
- this.deviceData?.ffmpeg?.libfdk_aac === true
697
- ) {
698
- // Audio data only on extra pipe created in spawn command
699
- commandLine.push('-f aac -i pipe:3');
700
- includeAudio = true;
701
- }
702
-
703
- if (this.deviceData.audio_enabled === true && this.streamer?.codecs?.audio === 'opus' && this.deviceData?.ffmpeg?.libopus === true) {
704
- // Audio data only on extra pipe created in spawn command
705
- commandLine.push('-i pipe:3');
706
- includeAudio = true;
707
- }
708
-
709
- // Build our video command for ffmpeg
710
- commandLine.push(
711
- '-map 0:v:0',
712
- '-codec:v copy',
713
- '-fps_mode passthrough',
714
- '-reset_timestamps 1',
715
- '-video_track_timescale 90000',
716
- '-payload_type ' + request.video.pt,
717
- '-ssrc ' + this.#hkSessions[request.sessionID].videoSSRC,
718
- '-f rtp',
719
- '-srtp_out_suite ' + this.hap.SRTPCryptoSuites[this.#hkSessions[request.sessionID].videoCryptoSuite],
720
- '-srtp_out_params ' +
721
- this.#hkSessions[request.sessionID].videoSRTP.toString('base64') +
722
- ' srtp://' +
723
- this.#hkSessions[request.sessionID].address +
724
- ':' +
725
- this.#hkSessions[request.sessionID].videoPort +
726
- '?rtcpport=' +
727
- this.#hkSessions[request.sessionID].videoPort +
728
- '&pkt_size=' +
729
- request.video.mtu,
730
- );
731
-
732
- // We have separate video and audio streams that need to be muxed together if audio is enabled
733
- if (includeAudio === true) {
734
- if (request.audio.codec === this.hap.AudioStreamingCodecType.AAC_ELD) {
735
- commandLine.push('-map 1:a:0', '-codec:a libfdk_aac', '-profile:a aac_eld');
736
- }
737
-
738
- if (request.audio.codec === this.hap.AudioStreamingCodecType.OPUS) {
739
- commandLine.push(
740
- '-map 1:a:0',
741
- '-codec:a libopus',
742
- '-application lowdelay',
743
- '-frame_duration ' + request.audio.packet_time.toString(),
744
- );
745
- }
746
-
747
- commandLine.push(
748
- '-flags +global_header',
749
- '-ar ' + request.audio.sample_rate + 'k',
750
- '-b:a ' + request.audio.max_bit_rate + 'k',
751
- '-ac ' + request.audio.channel,
752
- '-payload_type ' + request.audio.pt,
753
- '-ssrc ' + this.#hkSessions[request.sessionID].audioSSRC,
754
- '-f rtp',
755
- '-srtp_out_suite ' + this.hap.SRTPCryptoSuites[this.#hkSessions[request.sessionID].audioCryptoSuite],
756
- '-srtp_out_params ' +
757
- this.#hkSessions[request.sessionID].audioSRTP.toString('base64') +
758
- ' srtp://' +
759
- this.#hkSessions[request.sessionID].address +
760
- ':' +
761
- this.#hkSessions[request.sessionID].audioPort +
762
- '?rtcpport=' +
763
- this.#hkSessions[request.sessionID].audioPort +
764
- '&localrtcpport=' +
765
- this.#hkSessions[request.sessionID].localAudioPort +
766
- '&pkt_size=188',
767
- );
768
- }
769
-
770
- // Start our ffmpeg streaming process and stream from our streamer
771
- // video is pipe #1
772
- // audio is pipe #3 if including audio
773
1060
  this?.log?.debug?.(
774
1061
  'ffmpeg process for live streaming from "%s" will be called using the following commandline',
775
1062
  this.deviceData.description,
776
1063
  commandLine.join(' ').toString(),
777
1064
  );
778
- let ffmpegStreaming = child_process.spawn(this.deviceData.ffmpeg.binary, commandLine.join(' ').split(' '), {
779
- env: process.env,
780
- stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
781
- });
782
-
783
- ffmpegStreaming.on('exit', (code, signal) => {
784
- if (signal !== 'SIGKILL' || signal === null) {
785
- this?.log?.error?.(
786
- 'ffmpeg video/audio live streaming process for "%s" stopped unexpectedly. Exit code was "%s"',
787
- this.deviceData.description,
788
- code,
789
- );
790
1065
 
791
- // Clean up our streaming request by calling it again with a 'STOP' request
792
- this.handleStreamRequest({ type: this.hap.StreamRequestTypes.STOP, sessionID: request.sessionID }, null);
793
- }
794
- });
1066
+ // Launch the ffmpeg process for streaming and connect it to streamer input/output
1067
+ let ffmpegStream = this.ffmpeg.createSession(
1068
+ this.uuid,
1069
+ request.sessionID,
1070
+ commandLine,
1071
+ 'live',
1072
+ (data) => {
1073
+ if (data.toString().includes('frame=') === false && this.deviceData.ffmpeg.debug === true) {
1074
+ this?.log?.debug?.(data.toString());
1075
+ }
1076
+ },
1077
+ 4, // 4 pipes required
1078
+ );
795
1079
 
796
- // ffmpeg console output is via stderr
797
- ffmpegStreaming.stderr.on('data', (data) => {
798
- if (data.toString().includes('frame=') === false && this.deviceData?.ffmpeg?.debug === true) {
799
- // Monitor ffmpeg output
800
- this?.log?.debug?.(data.toString());
801
- }
802
- });
803
-
804
- // eslint-disable-next-line no-unused-vars
805
- ffmpegStreaming.on('error', (error) => {
806
- // Empty
807
- });
808
-
809
- // We only enable two/way audio on camera/doorbell if we have the required libraries in ffmpeg AND two-way/audio is enabled
810
- let ffmpegAudioTalkback = null; // No ffmpeg process for return audio yet
1080
+ // Two-way audio support if enabled and codecs available
1081
+ let ffmpegTalk = null;
811
1082
  if (
812
- ((this.streamer.codecs.talk === 'speex' && this.deviceData?.ffmpeg?.libspeex === true) ||
813
- (this.streamer.codecs.talk === 'opus' && this.deviceData?.ffmpeg?.libopus === true)) &&
814
- this.deviceData?.ffmpeg?.libfdk_aac === true &&
1083
+ ((this.streamer?.codecs?.talkback === Streamer.CODEC_TYPE.SPEEX &&
1084
+ this.ffmpeg?.features?.encoders?.includes('libspeex') === true) ||
1085
+ (this.streamer?.codecs?.talkback === Streamer.CODEC_TYPE.OPUS &&
1086
+ this.ffmpeg?.features?.encoders?.includes('libopus') === true)) &&
1087
+ this.ffmpeg?.features?.encoders?.includes('libfdk_aac') === true &&
815
1088
  this.deviceData.audio_enabled === true &&
816
1089
  this.deviceData.has_speaker === true &&
817
1090
  this.deviceData.has_microphone === true
818
1091
  ) {
819
- // Setup RTP splitter for two/away audio
820
- this.#hkSessions[request.sessionID].rtpSplitter = dgram.createSocket('udp4');
821
- this.#hkSessions[request.sessionID].rtpSplitter.bind(this.#hkSessions[request.sessionID].rptSplitterPort);
822
-
823
- this.#hkSessions[request.sessionID].rtpSplitter.on('error', () => {
824
- this.#hkSessions[request.sessionID].rtpSplitter.close();
825
- });
826
-
827
- this.#hkSessions[request.sessionID].rtpSplitter.on('message', (message) => {
828
- let payloadType = message.readUInt8(1) & 0x7f;
829
- if (payloadType === request.audio.pt) {
830
- // Audio payload type from HomeKit should match our payload type for audio
831
- if (message.length > 50) {
832
- // Only send on audio data if we have a longer audio packet.
833
- // (not sure it makes any difference, as under iOS 15 packets are roughly same length)
834
- this.#hkSessions[request.sessionID].rtpSplitter.send(message, this.#hkSessions[request.sessionID].audioTalkbackPort);
835
- }
1092
+ // Setup RTP splitter for two-way audio
1093
+ session.rtpSplitter = dgram.createSocket('udp4');
1094
+ session.rtpSplitter.bind(session.rtpSplitterPort);
1095
+ session.rtpSplitter.on('error', () => session.rtpSplitter.close());
1096
+ session.rtpSplitter.on('message', (message) => {
1097
+ let pt = message.readUInt8(1) & 0x7f;
1098
+ if (pt === request.audio.pt && message.length > 50) {
1099
+ session.rtpSplitter.send(message, session.audioTalkbackPort);
836
1100
  } else {
837
- this.#hkSessions[request.sessionID].rtpSplitter.send(message, this.#hkSessions[request.sessionID].localAudioPort);
838
- // Send RTCP to return audio as a heartbeat
839
- this.#hkSessions[request.sessionID].rtpSplitter.send(message, this.#hkSessions[request.sessionID].audioTalkbackPort);
1101
+ session.rtpSplitter.send(message, session.localAudioPort);
1102
+ session.rtpSplitter.send(message, session.audioTalkbackPort); // RTCP keepalive
840
1103
  }
841
1104
  });
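
The splitter above distinguishes packets by peeking at the second byte of each datagram: in an RTP header the low seven bits of that byte carry the payload type (the high bit is the marker flag), so comparing it against request.audio.pt separates HomeKit's audio from the RTCP traffic that is bounced to the talkback port. A self-contained sketch of that header check, using a made-up 12-byte RTP header:

import { Buffer } from 'node:buffer';

// Extract the 7-bit RTP payload type from byte 1 of an RTP packet (bit 7 of that byte is the marker flag)
function rtpPayloadType(packet) {
  return packet.readUInt8(1) & 0x7f;
}

// Hypothetical RTP header: version 2, marker clear, payload type 110, then arbitrary sequence/timestamp/SSRC bytes
let packet = Buffer.from([0x80, 0x6e, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0xde, 0xad, 0xbe, 0xef]);
console.log(rtpPayloadType(packet)); // 110
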
842
1105
 
843
- // Build ffmpeg command
844
- let commandLine = [
845
- '-hide_banner -nostats',
846
- '-protocol_whitelist pipe,udp,rtp',
847
- '-f sdp',
848
- '-codec:a libfdk_aac',
849
- '-i pipe:0',
850
- '-map 0:a',
1106
+ let talkbackCommandLine = [
1107
+ '-hide_banner',
1108
+ '-nostats',
1109
+ '-protocol_whitelist',
1110
+ 'pipe,udp,rtp',
1111
+ '-f',
1112
+ 'sdp',
1113
+ '-codec:a',
1114
+ 'libfdk_aac',
1115
+ '-i',
1116
+ 'pipe:0',
1117
+ '-map',
1118
+ '0:a',
1119
+ ...(this.streamer?.codecs?.talkback === Streamer.CODEC_TYPE.SPEEX
1120
+ ? ['-codec:a', 'libspeex', '-frames_per_packet', '4', '-vad', '1', '-ac', '1', '-ar', '16k']
1121
+ : []),
1122
+ ...(this.streamer?.codecs?.talkback === Streamer.CODEC_TYPE.OPUS
1123
+ ? ['-codec:a', 'libopus', '-application', 'lowdelay', '-ac', '2', '-ar', '48k']
1124
+ : []),
1125
+ '-f',
1126
+ 'data',
1127
+ 'pipe:1',
851
1128
  ];
852
1129
 
853
- if (this.streamer.codecs.talk === 'speex') {
854
- commandLine.push('-codec:a libspeex', '-frames_per_packet 4', '-vad 1', '-ac 1', '-ar 16k');
855
- }
856
-
857
- if (this.streamer.codecs.talk === 'opus') {
858
- commandLine.push('-codec:a libopus', '-application lowdelay', '-ac 2', '-ar 48k');
859
- }
860
-
861
- commandLine.push('-f data pipe:1');
862
-
863
1130
  this?.log?.debug?.(
864
1131
  'ffmpeg process for talkback on "%s" will be called using the following commandline',
865
1132
  this.deviceData.description,
866
- commandLine.join(' ').toString(),
1133
+ talkbackCommandLine.join(' '),
867
1134
  );
868
- ffmpegAudioTalkback = child_process.spawn(this.deviceData.ffmpeg.binary, commandLine.join(' ').split(' '), {
869
- env: process.env,
870
- });
871
1135
 
872
- ffmpegAudioTalkback.on('exit', (code, signal) => {
873
- if (signal !== 'SIGKILL' || signal === null) {
874
- this?.log?.error?.(
875
- 'ffmpeg audio talkback streaming process for "%s" stopped unexpectedly. Exit code was "%s"',
876
- this.deviceData.description,
877
- code,
878
- );
879
-
880
- // Clean up our streaming request by calling it again with a 'STOP' request
881
- this.handleStreamRequest({ type: this.hap.StreamRequestTypes.STOP, sessionID: request.sessionID }, null);
882
- }
883
- });
884
-
885
- // eslint-disable-next-line no-unused-vars
886
- ffmpegAudioTalkback.on('error', (error) => {
887
- // Empty
888
- });
889
-
890
- // ffmpeg console output is via stderr
891
- ffmpegAudioTalkback.stderr.on('data', (data) => {
892
- if (data.toString().includes('frame=') === false && this.deviceData?.ffmpeg?.debug === true) {
893
- // Monitor ffmpeg output
894
- this?.log?.debug?.(data.toString());
895
- }
896
- });
1136
+ ffmpegTalk = this.ffmpeg.createSession(
1137
+ this.uuid,
1138
+ request.sessionID,
1139
+ talkbackCommandLine,
1140
+ 'talk',
1141
+ (data) => {
1142
+ if (data.toString().includes('frame=') === false && this.deviceData.ffmpeg.debug === true) {
1143
+ this?.log?.debug?.(data.toString());
1144
+ }
1145
+ },
1146
+ 3, // 3 pipes required
1147
+ );
897
1148
 
898
- // Write out SDP configuration
899
- // Tried to align the SDP configuration to what HomeKit has sent us in its audio request details
900
- let sdpResponse = [
1149
+ let sdp = [
901
1150
  'v=0',
902
- 'o=- 0 0 IN ' + (this.#hkSessions[request.sessionID].ipv6 ? 'IP6' : 'IP4') + ' ' + this.#hkSessions[request.sessionID].address,
1151
+ 'o=- 0 0 IN ' + (session.ipv6 ? 'IP6' : 'IP4') + ' ' + session.address,
903
1152
  's=HomeKit Audio Talkback',
904
- 'c=IN ' + (this.#hkSessions[request.sessionID].ipv6 ? 'IP6' : 'IP4') + ' ' + this.#hkSessions[request.sessionID].address,
1153
+ 'c=IN ' + (session.ipv6 ? 'IP6' : 'IP4') + ' ' + session.address,
905
1154
  't=0 0',
906
- 'm=audio ' + this.#hkSessions[request.sessionID].audioTalkbackPort + ' RTP/AVP ' + request.audio.pt,
1155
+ 'm=audio ' + session.audioTalkbackPort + ' RTP/AVP ' + request.audio.pt,
907
1156
  'b=AS:' + request.audio.max_bit_rate,
908
1157
  'a=ptime:' + request.audio.packet_time,
909
1158
  ];
910
1159
 
911
1160
  if (request.audio.codec === this.hap.AudioStreamingCodecType.AAC_ELD) {
912
- sdpResponse.push(
1161
+ sdp.push(
913
1162
  'a=rtpmap:' + request.audio.pt + ' MPEG4-GENERIC/' + request.audio.sample_rate * 1000 + '/' + request.audio.channel,
914
1163
  'a=fmtp:' +
915
1164
  request.audio.pt +
@@ -918,309 +1167,46 @@ export default class NestCamera extends HomeKitDevice {
918
1167
  }
919
1168
 
920
1169
  if (request.audio.codec === this.hap.AudioStreamingCodecType.OPUS) {
921
- sdpResponse.push(
1170
+ sdp.push(
922
1171
  'a=rtpmap:' + request.audio.pt + ' opus/' + request.audio.sample_rate * 1000 + '/' + request.audio.channel,
923
- 'a=fmtp:' + request.audio.pt + ' minptime=10;useinbandfec=1"',
1172
+ 'a=fmtp:' + request.audio.pt + ' minptime=10;useinbandfec=1',
924
1173
  );
925
1174
  }
926
1175
 
927
- sdpResponse.push(
928
- 'a=crypto:1 ' +
929
- this.hap.SRTPCryptoSuites[this.#hkSessions[request.sessionID].audioCryptoSuite] +
930
- ' inline:' +
931
- this.#hkSessions[request.sessionID].audioSRTP.toString('base64'),
932
- );
933
-
934
- ffmpegAudioTalkback.stdin.write(sdpResponse.join('\r\n'));
935
- ffmpegAudioTalkback.stdin.end();
1176
+ sdp.push('a=crypto:1 ' + this.hap.SRTPCryptoSuites[session.audioCryptoSuite] + ' inline:' + session.audioSRTP.toString('base64'));
1177
+ ffmpegTalk?.stdin?.write?.(sdp.join('\r\n'));
1178
+ ffmpegTalk?.stdin?.end?.();
936
1179
  }
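
For reference, once joined with CRLF the SDP written to the talkback ffmpeg process looks roughly like the listing below. Every value here is illustrative only (address, port, payload type, bitrate and key are made up), the AAC-ELD fmtp attributes are elided in this hunk and so are left as '…', and the exact rtpmap/fmtp lines depend on whether HomeKit negotiated AAC-ELD or OPUS:

v=0
o=- 0 0 IN IP4 192.168.1.20
s=HomeKit Audio Talkback
c=IN IP4 192.168.1.20
t=0 0
m=audio 53210 RTP/AVP 110
b=AS:24
a=ptime:20
a=rtpmap:110 MPEG4-GENERIC/16000/1
a=fmtp:110 …
a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:<base64 SRTP key and salt>
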
937
1180
 
938
- this?.log?.info?.(
939
- 'Live stream started on "%s" %s',
940
- this.deviceData.description,
941
- ffmpegAudioTalkback?.stdout ? 'with two-way audio' : '',
942
- );
943
-
944
- // Start the appropriate streamer
945
- this.streamer !== undefined &&
946
- this.streamer.startLiveStream(
947
- request.sessionID,
948
- ffmpegStreaming.stdin,
949
- ffmpegStreaming?.stdio?.[3] ? ffmpegStreaming.stdio[3] : null,
950
- ffmpegAudioTalkback?.stdout ? ffmpegAudioTalkback.stdout : null,
951
- );
952
-
953
- // Store our ffmpeg sessions
954
- ffmpegStreaming && this.#hkSessions[request.sessionID].ffmpeg.push(ffmpegStreaming); // Store ffmpeg process ID
955
- ffmpegAudioTalkback && this.#hkSessions[request.sessionID].ffmpeg.push(ffmpegAudioTalkback); // Store ffmpeg audio return process ID
956
- this.#hkSessions[request.sessionID].video = request.video; // Cache the video request details
957
- this.#hkSessions[request.sessionID].audio = request.audio; // Cache the audio request details
1181
+ // Start the actual streamer process
1182
+ this?.log?.info?.('Live stream started on "%s"%s', this.deviceData.description, ffmpegTalk ? ' (two-way audio enabled)' : '');
1183
+ let { video, audio, talkback } = await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.START_LIVE, {
1184
+ sessionID: request.sessionID,
1185
+ });
1186
+ // Connect the ffmpeg process to the streamer input/output
1187
+ video?.pipe?.(ffmpegStream?.stdin); // Streamer video → ffmpeg stdin (pipe:0)
1188
+ audio?.pipe?.(ffmpegStream?.stdio?.[3]); // Streamer audio → ffmpeg pipe:3
1189
+ ffmpegTalk?.stdout?.pipe?.(talkback); // ffmpeg talkback stdout → Streamer talkback pipe:1
958
1190
  }
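
The '4 pipes required' argument passed to createSession() suggests, and this is an assumption about the FFmpeg helper rather than something visible in this hunk, that the child process is spawned with four 'pipe' entries in its stdio array, just as the removed code did directly. That gives ffmpeg an extra readable file descriptor 3 alongside stdin/stdout/stderr, which is why video is piped into stdin (pipe:0) while audio is piped into stdio[3] (pipe:3). A minimal sketch of that pattern with plain child_process, again assuming an ffmpeg binary on the PATH:

import { spawn } from 'node:child_process';
import { Readable } from 'node:stream';

// fd 0 = stdin (video), fd 1 = stdout, fd 2 = stderr, fd 3 = an extra input pipe for audio
let child = spawn(
  'ffmpeg',
  ['-f', 'h264', '-i', 'pipe:0', '-f', 's16le', '-ar', '48000', '-ac', '2', '-i', 'pipe:3', '-f', 'null', '-'],
  { stdio: ['pipe', 'pipe', 'pipe', 'pipe'] },
);

// Hypothetical empty sources standing in for the streamer's video/audio output streams
let videoSource = Readable.from([]);
let audioSource = Readable.from([]);
videoSource.pipe(child.stdin); // pipe:0
audioSource.pipe(child.stdio[3]); // pipe:3
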
959
1191
 
960
- if (request.type === this.hap.StreamRequestTypes.STOP && typeof this.#hkSessions[request.sessionID] === 'object') {
961
- this.streamer !== undefined && this.streamer.stopLiveStream(request.sessionID);
962
-
963
- // Close HomeKit session
964
- this.controller.forceStopStreamingSession(request.sessionID);
965
-
966
- // Close off any running ffmpeg and/or splitter processes we created
967
- if (typeof this.#hkSessions[request.sessionID]?.rtpSplitter?.close === 'function') {
968
- this.#hkSessions[request.sessionID].rtpSplitter.close();
969
- }
970
- this.#hkSessions[request.sessionID].ffmpeg.forEach((ffmpeg) => {
971
- ffmpeg.kill('SIGKILL');
1192
+ if (request.type === this.hap.StreamRequestTypes.STOP && this.#liveSessions.has(request.sessionID)) {
1193
+ // Stop the HomeKit stream and clean up any associated ffmpeg or RTP splitter sessions
1194
+ await this.message(Streamer.MESSAGE, Streamer.MESSAGE_TYPE.STOP_LIVE, {
1195
+ sessionID: request.sessionID,
972
1196
  });
973
-
974
- delete this.#hkSessions[request.sessionID];
975
-
1197
+ this.controller.forceStopStreamingSession(request.sessionID);
1198
+ this.#liveSessions.get(request.sessionID)?.rtpSplitter?.close?.();
1199
+ this.ffmpeg?.killSession?.(this.uuid, request.sessionID, 'live', 'SIGKILL');
1200
+ this.ffmpeg?.killSession?.(this.uuid, request.sessionID, 'talk', 'SIGKILL');
1201
+ this.#liveSessions.delete(request.sessionID);
976
1202
  this?.log?.info?.('Live stream stopped from "%s"', this.deviceData.description);
977
1203
  }
978
1204
 
979
- if (request.type === this.hap.StreamRequestTypes.RECONFIGURE && typeof this.#hkSessions[request.sessionID] === 'object') {
1205
+ if (request.type === this.hap.StreamRequestTypes.RECONFIGURE && this.#liveSessions.has(request.sessionID)) {
980
1206
  this?.log?.debug?.('Unsupported reconfiguration request for live stream on "%s"', this.deviceData.description);
981
1207
  }
982
1208
 
983
- if (typeof callback === 'function') {
984
- callback(); // do callback if defined
985
- }
986
- }
987
-
988
- updateDevice(deviceData) {
989
- if (typeof deviceData !== 'object' || this.controller === undefined) {
990
- return;
991
- }
992
-
993
- if (this.deviceData.migrating === false && deviceData.migrating === true) {
994
- // Migration happening between Nest <-> Google Home apps. We'll stop any active streams, close the current streaming object
995
- this?.log?.warn?.('Migration between Nest <-> Google Home apps has started for "%s"', deviceData.description);
996
- this.streamer !== undefined && this.streamer.stopEverything();
997
- this.streamer = undefined;
998
- }
999
-
1000
- if (this.deviceData.migrating === true && deviceData.migrating === false) {
1001
- // Migration has completed between Nest <-> Google Home apps
1002
- this?.log?.success?.('Migration between Nest <-> Google Home apps has completed for "%s"', deviceData.description);
1003
- }
1004
-
1005
- // Handle case of changes in streaming protocols OR just finished migration between Nest <-> Google Home apps
1006
- if (this.streamer === undefined && deviceData.migrating === false) {
1007
- if (JSON.stringify(deviceData.streaming_protocols) !== JSON.stringify(this.deviceData.streaming_protocols)) {
1008
- this?.log?.warn?.('Available streaming protocols have changed for "%s"', deviceData.description);
1009
- this.streamer !== undefined && this.streamer.stopEverything();
1010
- this.streamer = undefined;
1011
- }
1012
- if (deviceData.streaming_protocols.includes(PROTOCOLWEBRTC) === true && WebRTC !== undefined) {
1013
- this?.log?.debug?.('Using WebRTC streamer for "%s"', deviceData.description);
1014
- this.streamer = new WebRTC(deviceData, {
1015
- log: this.log,
1016
- buffer:
1017
- deviceData.hksv === true &&
1018
- this?.controller?.recordingManagement?.recordingManagementService !== undefined &&
1019
- this.controller.recordingManagement.recordingManagementService.getCharacteristic(this.hap.Characteristic.Active).value ===
1020
- this.hap.Characteristic.Active.ACTIVE,
1021
- });
1022
- }
1023
-
1024
- if (deviceData.streaming_protocols.includes(PROTOCOLNEXUSTALK) === true && NexusTalk !== undefined) {
1025
- this?.log?.debug?.('Using NexusTalk streamer for "%s"', deviceData.description);
1026
- this.streamer = new NexusTalk(deviceData, {
1027
- log: this.log,
1028
- buffer:
1029
- deviceData.hksv === true &&
1030
- this?.controller?.recordingManagement?.recordingManagementService !== undefined &&
1031
- this.controller.recordingManagement.recordingManagementService.getCharacteristic(this.hap.Characteristic.Active).value ===
1032
- this.hap.Characteristic.Active.ACTIVE,
1033
- });
1034
- }
1035
- }
1036
-
1037
- // Check to see if any activity zones were added for both non-HKSV and HKSV enabled devices
1038
- if (
1039
- Array.isArray(deviceData.activity_zones) === true &&
1040
- JSON.stringify(deviceData.activity_zones) !== JSON.stringify(this.deviceData.activity_zones)
1041
- ) {
1042
- deviceData.activity_zones.forEach((zone) => {
1043
- if (this.deviceData.hksv === false || (this.deviceData.hksv === true && zone.id === 1)) {
1044
- if (this.motionServices?.[zone.id]?.service === undefined) {
1045
- // Zone doesn't have an associated motion sensor, so add one
1046
- let zoneName = zone.id === 1 ? '' : zone.name;
1047
- let tempService = this.addHKService(this.hap.Service.MotionSensor, zoneName, zone.id);
1048
-
1049
- this.addHKCharacteristic(tempService, this.hap.Characteristic.Active);
1050
- tempService.updateCharacteristic(this.hap.Characteristic.Name, zoneName);
1051
- tempService.updateCharacteristic(this.hap.Characteristic.MotionDetected, false); // No motion initially
1052
-
1053
- this.motionServices[zone.id] = { service: tempService, timer: undefined };
1054
- }
1055
- }
1056
- });
1057
- }
1058
-
1059
- // Check to see if any activity zones were removed for both non-HKSV and HKSV enabled devices
1060
- // We'll also update the online status of the camera in the motion service here
1061
- Object.entries(this.motionServices).forEach(([zoneID, service]) => {
1062
- // Set online status
1063
- service.service.updateCharacteristic(
1064
- this.hap.Characteristic.Active,
1065
- deviceData.online === true ? this.hap.Characteristic.Active.ACTIVE : this.hap.Characteristic.Active.INACTIVE,
1066
- );
1067
-
1068
- // Handle deleted zones (excluding zone ID 1 for HKSV)
1069
- if (
1070
- zoneID !== '1' &&
1071
- Array.isArray(deviceData.activity_zones) === true &&
1072
- deviceData.activity_zones.findIndex(({ id }) => id === Number(zoneID)) === -1
1073
- ) {
1074
- // Motion service we created doesn't appear in zone list anymore, so assume deleted
1075
- this.accessory.removeService(service.service);
1076
- delete this.motionServices[zoneID];
1077
- }
1078
- });
1079
-
1080
- if (this.operatingModeService !== undefined) {
1081
- // Update camera off/on status
1082
- this.operatingModeService.updateCharacteristic(
1083
- this.hap.Characteristic.ManuallyDisabled,
1084
- deviceData.streaming_enabled === false
1085
- ? this.hap.Characteristic.ManuallyDisabled.DISABLED
1086
- : this.hap.Characteristic.ManuallyDisabled.ENABLED,
1087
- );
1088
-
1089
- if (deviceData?.has_statusled === true) {
1090
- // Set camera recording indicator. This cannot be turned off on Nest Cameras/Doorbells
1091
- // 0 = auto
1092
- // 1 = low
1093
- // 2 = high
1094
- this.operatingModeService.updateCharacteristic(
1095
- this.hap.Characteristic.CameraOperatingModeIndicator,
1096
- deviceData.statusled_brightness !== 1,
1097
- );
1098
- }
1099
-
1100
- if (deviceData?.has_irled === true) {
1101
- // Set nightvision status in HomeKit
1102
- this.operatingModeService.updateCharacteristic(this.hap.Characteristic.NightVision, deviceData.irled_enabled);
1103
- }
1104
-
1105
- if (deviceData?.has_video_flip === true) {
1106
- // Update image flip status
1107
- this.operatingModeService.updateCharacteristic(this.hap.Characteristic.ImageRotation, deviceData.video_flipped === true ? 180 : 0);
1108
- }
1109
- }
1110
-
1111
- if (deviceData.hksv === true && this.controller?.recordingManagement?.recordingManagementService !== undefined) {
1112
- // Update recording audio status
1113
- this.controller.recordingManagement.recordingManagementService.updateCharacteristic(
1114
- this.hap.Characteristic.RecordingAudioActive,
1115
- deviceData.audio_enabled === true
1116
- ? this.hap.Characteristic.RecordingAudioActive.ENABLE
1117
- : this.hap.Characteristic.RecordingAudioActive.DISABLE,
1118
- );
1119
- }
1120
-
1121
- if (this.controller?.microphoneService !== undefined) {
1122
- // Update microphone volume if specified
1123
- //this.controller.microphoneService.updateCharacteristic(this.hap.Characteristic.Volume, deviceData.xxx);
1124
-
1125
- // if audio is disabled, we'll mute microphone
1126
- this.controller.setMicrophoneMuted(deviceData.audio_enabled === false ? true : false);
1127
- }
1128
- if (this.controller?.speakerService !== undefined) {
1129
- // Update speaker volume if specified
1130
- //this.controller.speakerService.updateCharacteristic(this.hap.Characteristic.Volume, deviceData.xxx);
1131
-
1132
- // if audio is disabled, we'll mute speaker
1133
- this.controller.setSpeakerMuted(deviceData.audio_enabled === false ? true : false);
1134
- }
1135
-
1136
- // Notify our associated streamers about any data changes
1137
- this.streamer !== undefined && this.streamer.update(deviceData);
1138
-
1139
- // Process alerts, the most recent alert is first
1140
- // For HKSV, we're interested in motion events
1141
- // For non-HKSV, we're interested in motion, face and person events (maybe sound and package later)
1142
- deviceData.alerts.forEach((event) => {
1143
- if (
1144
- this.operatingModeService === undefined ||
1145
- (this.operatingModeService !== undefined &&
1146
- this.operatingModeService.getCharacteristic(this.hap.Characteristic.HomeKitCameraActive).value ===
1147
- this.hap.Characteristic.HomeKitCameraActive.ON)
1148
- ) {
1149
- // We're configured to handle camera events
1150
- // https://github.com/Supereg/secure-video-specification?tab=readme-ov-file#33-homekitcameraactive
1151
-
1152
- // Handle motion event
1153
- // For a HKSV enabled camera, we will use this to trigger the starting of the HKSV recording if the camera is active
1154
- if (event.types.includes('motion') === true) {
1155
- if (this.motionTimer === undefined && (this.deviceData.hksv === false || this.streamer === undefined)) {
1156
- this?.log?.info?.('Motion detected at "%s"', deviceData.description);
1157
- }
1158
-
1159
- event.zone_ids.forEach((zoneID) => {
1160
- if (
1161
- typeof this.motionServices?.[zoneID]?.service === 'object' &&
1162
- this.motionServices[zoneID].service.getCharacteristic(this.hap.Characteristic.MotionDetected).value !== true
1163
- ) {
1164
- // Trigger motion for matching zone if not already active
1165
- this.motionServices[zoneID].service.updateCharacteristic(this.hap.Characteristic.MotionDetected, true);
1166
-
1167
- // Log motion started into history
1168
- if (typeof this.historyService?.addHistory === 'function') {
1169
- this.historyService.addHistory(this.motionServices[zoneID].service, {
1170
- time: Math.floor(Date.now() / 1000),
1171
- status: 1,
1172
- });
1173
- }
1174
- }
1175
- });
1176
-
1177
- // Clear any motion active timer so we can extend if more motion detected
1178
- clearTimeout(this.motionTimer);
1179
- this.motionTimer = setTimeout(() => {
1180
- event.zone_ids.forEach((zoneID) => {
1181
- if (typeof this.motionServices?.[zoneID]?.service === 'object') {
1182
- // Mark associated motion services as motion not detected
1183
- this.motionServices[zoneID].service.updateCharacteristic(this.hap.Characteristic.MotionDetected, false);
1184
-
1185
- // Log motion ended into history
1186
- if (typeof this.historyService?.addHistory === 'function') {
1187
- this.historyService.addHistory(this.motionServices[zoneID].service, {
1188
- time: Math.floor(Date.now() / 1000),
1189
- status: 0,
1190
- });
1191
- }
1192
- }
1193
- });
1194
-
1195
- this.motionTimer = undefined; // No motion timer active
1196
- }, this.deviceData.motionCooldown * 1000);
1197
- }
1198
-
1199
- // Handle person/face event
1200
- // We also treat a 'face' event the same as a person event ie: if you have a face, you have a person
1201
- if (event.types.includes('person') === true || event.types.includes('face') === true) {
1202
- if (this.personTimer === undefined) {
1203
- // We don't have a person cooldown timer running, so we can process the 'person'/'face' event
1204
- if (this.deviceData.hksv === false || this.streamer === undefined) {
1205
- // We'll only log a person detected event if HKSV is disabled
1206
- this?.log?.info?.('Person detected at "%s"', deviceData.description);
1207
- }
1208
-
1209
- // Cooldown for person being detected
1210
- // Start this before we process further
1211
- this.personTimer = setTimeout(() => {
1212
- this.personTimer = undefined; // No person timer active
1213
- }, this.deviceData.personCooldown * 1000);
1214
-
1215
- if (event.types.includes('motion') === false) {
1216
- // If the person/face event doesn't include a motion event, add it in here
1217
- // This will handle all the motion triggering stuff
1218
- event.types.push('motion');
1219
- }
1220
- }
1221
- }
1222
- }
1223
- });
1209
+ callback?.(); // do callback if defined
1224
1210
  }
1225
1211
 
1226
1212
  createCameraMotionServices() {
@@ -1239,12 +1225,14 @@ export default class NestCamera extends HomeKitDevice {
1239
1225
  // If we have HKSV video enabled, we'll only create a single motion sensor
1240
1226
  // A zone with the ID of 1 is treated as the main motion sensor
1241
1227
  for (let zone of zones) {
1242
- if (this.deviceData.hksv === true && zone.id !== 1) {
1228
+ if (this.deviceData.hksv === true && this.ffmpeg instanceof FFmpeg === true && zone.id !== 1) {
1243
1229
  continue;
1244
1230
  }
1245
1231
 
1246
1232
  let zoneName = zone.id === 1 ? '' : zone.name;
1247
- let service = this.addHKService(this.hap.Service.MotionSensor, zoneName, zone.id);
1233
+ let eveOptions = zone.id === 1 ? {} : undefined; // Only link EveHome for zone 1
1234
+
1235
+ let service = this.addHKService(this.hap.Service.MotionSensor, zoneName, zone.id, eveOptions);
1248
1236
  this.addHKCharacteristic(service, this.hap.Characteristic.Active);
1249
1237
  service.updateCharacteristic(this.hap.Characteristic.Name, zoneName);
1250
1238
  service.updateCharacteristic(this.hap.Characteristic.MotionDetected, false); // No motion initially
@@ -1256,92 +1244,335 @@ export default class NestCamera extends HomeKitDevice {
1256
1244
 
1257
1245
  generateControllerOptions() {
1258
1246
  // Setup HomeKit controller camera/doorbell options
1247
+
1248
+ let resolutions = [
1249
+ [3840, 2160, 30], // 4K
1250
+ [1920, 1080, 30], // 1080p
1251
+ [1600, 1200, 30], // Native res of Nest Hello
1252
+ [1280, 960, 30],
1253
+ [1280, 720, 30], // 720p
1254
+ [1024, 768, 30],
1255
+ [640, 480, 30],
1256
+ [640, 360, 30],
1257
+ [480, 360, 30],
1258
+ [480, 270, 30],
1259
+ [320, 240, 30],
1260
+ [320, 240, 15], // Apple Watch requires this (plus OPUS @16K)
1261
+ [320, 180, 30],
1262
+ [320, 180, 15],
1263
+ ];
1264
+
1265
+ let profiles = [this.hap.H264Profile.MAIN];
1266
+ let levels = [this.hap.H264Level.LEVEL3_1, this.hap.H264Level.LEVEL3_2, this.hap.H264Level.LEVEL4_0];
1267
+ let videoType = this.hap.VideoCodecType.H264;
1268
+
1259
1269
  let controllerOptions = {
1260
1270
  cameraStreamCount: this.deviceData.maxStreams,
1261
1271
  delegate: this,
1262
- streamingOptions: {
1263
- supportedCryptoSuites: [this.hap.SRTPCryptoSuites.NONE, this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
1264
- video: {
1265
- resolutions: [
1266
- // width, height, framerate
1267
- // <--- Need to auto generate this list
1268
- [3840, 2160, 30], // 4K
1269
- [1920, 1080, 30], // 1080p
1270
- [1600, 1200, 30], // Native res of Nest Hello
1271
- [1280, 960, 30],
1272
- [1280, 720, 30], // 720p
1273
- [1024, 768, 30],
1274
- [640, 480, 30],
1275
- [640, 360, 30],
1276
- [480, 360, 30],
1277
- [480, 270, 30],
1278
- [320, 240, 30],
1279
- [320, 240, 15], // Apple Watch requires this configuration (Apple Watch also seems to required OPUS @16K)
1280
- [320, 180, 30],
1281
- [320, 180, 15],
1282
- ],
1283
- codec: {
1284
- type: this.hap.VideoCodecType.H264,
1285
- profiles: [this.hap.H264Profile.MAIN],
1286
- levels: [this.hap.H264Level.LEVEL3_1, this.hap.H264Level.LEVEL3_2, this.hap.H264Level.LEVEL4_0],
1287
- },
1288
- },
1289
- audio: {
1290
- twoWayAudio:
1291
- this.deviceData?.ffmpeg?.libfdk_aac === true &&
1292
- (this.deviceData?.ffmpeg?.libspeex === true || this.deviceData?.ffmpeg?.libopus === true) &&
1293
- this.deviceData.has_speaker === true &&
1294
- this.deviceData.has_microphone === true,
1295
- codecs: [
1296
- {
1297
- type: this.hap.AudioStreamingCodecType.AAC_ELD,
1298
- samplerate: this.hap.AudioStreamingSamplerate.KHZ_16,
1299
- audioChannel: 1,
1272
+ streamingOptions:
1273
+ this.ffmpeg instanceof FFmpeg === true
1274
+ ? {
1275
+ supportedCryptoSuites: [this.hap.SRTPCryptoSuites.NONE, this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
1276
+ video: {
1277
+ resolutions,
1278
+ codec: {
1279
+ type: videoType,
1280
+ profiles,
1281
+ levels,
1282
+ },
1283
+ },
1284
+ audio: {
1285
+ twoWayAudio:
1286
+ this.ffmpeg?.features?.encoders?.includes('libfdk_aac') === true &&
1287
+ (this.ffmpeg?.features?.encoders?.includes('libspeex') === true ||
1288
+ this.ffmpeg?.features?.encoders?.includes('libopus') === true) &&
1289
+ this.deviceData.has_speaker === true &&
1290
+ this.deviceData.has_microphone === true,
1291
+ codecs: [
1292
+ {
1293
+ type: this.hap.AudioStreamingCodecType.AAC_ELD,
1294
+ samplerate: this.hap.AudioStreamingSamplerate.KHZ_16,
1295
+ audioChannel: 1,
1296
+ },
1297
+ ],
1298
+ },
1299
+ }
1300
+ : {
1301
+ supportedCryptoSuites: [this.hap.SRTPCryptoSuites.NONE],
1302
+ video: {
1303
+ resolutions: [],
1304
+ codec: {
1305
+ type: videoType,
1306
+ profiles: [],
1307
+ levels: [],
1308
+ },
1309
+ },
1310
+ audio: {
1311
+ twoWayAudio: false,
1312
+ codecs: [],
1313
+ },
1300
1314
  },
1301
- ],
1302
- },
1303
- },
1304
- recording: undefined,
1305
- sensors: undefined,
1315
+ recording:
1316
+ this.deviceData.hksv === true && this.ffmpeg instanceof FFmpeg === true
1317
+ ? {
1318
+ delegate: this,
1319
+ options: {
1320
+ overrideEventTriggerOptions: [this.hap.EventTriggerOption.MOTION],
1321
+ mediaContainerConfiguration: [
1322
+ {
1323
+ fragmentLength: 4000,
1324
+ type: this.hap.MediaContainerType.FRAGMENTED_MP4,
1325
+ },
1326
+ ],
1327
+ prebufferLength: 4000, // Seems to always be 4000???
1328
+ video: {
1329
+ resolutions,
1330
+ parameters: {
1331
+ profiles,
1332
+ levels,
1333
+ },
1334
+ type: videoType,
1335
+ },
1336
+ audio: {
1337
+ codecs: [
1338
+ {
1339
+ type: this.hap.AudioRecordingCodecType.AAC_LC,
1340
+ samplerate: this.hap.AudioRecordingSamplerate.KHZ_16,
1341
+ audioChannel: 1,
1342
+ },
1343
+ ],
1344
+ },
1345
+ },
1346
+ }
1347
+ : undefined,
1348
+ sensors:
1349
+ this.deviceData.hksv === true && this.ffmpeg instanceof FFmpeg === true
1350
+ ? {
1351
+ motion: typeof this.motionServices?.[1]?.service === 'object' ? this.motionServices[1].service : false,
1352
+ }
1353
+ : undefined,
1306
1354
  };
1355
+ return controllerOptions;
1356
+ }
1357
+ }
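
The object returned by generateControllerOptions() matches the CameraControllerOptions shape that HAP-NodeJS expects. How it is consumed is not part of this hunk, but generic usage would look something like the sketch below (the function name and parameters here are illustrative, not taken from the package):

// Sketch: attach a HomeKit camera controller built from options shaped like generateControllerOptions() returns.
// 'hap' is the HAP-NodeJS namespace and 'accessory' a homebridge PlatformAccessory;
// a DoorbellController accepts the same options shape.
function attachCameraController(hap, accessory, delegate) {
  let controller = new hap.CameraController(delegate.generateControllerOptions());
  accessory.configureController(controller);
  return controller;
}
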
1358
+
1359
+ // Function to process our raw Nest or Google data for this device type
1360
+ export function processRawData(log, rawData, config, deviceType = undefined) {
1361
+ if (
1362
+ rawData === null ||
1363
+ typeof rawData !== 'object' ||
1364
+ rawData?.constructor !== Object ||
1365
+ typeof config !== 'object' ||
1366
+ config?.constructor !== Object
1367
+ ) {
1368
+ return;
1369
+ }
1307
1370
 
1308
- if (this.deviceData.hksv === true) {
1309
- controllerOptions.recording = {
1310
- delegate: this,
1311
- options: {
1312
- overrideEventTriggerOptions: [this.hap.EventTriggerOption.MOTION],
1313
- mediaContainerConfiguration: [
1371
+ let devices = {};
1372
+
1373
+ // Process data for any camera/doorbell(s) we have in the raw data
1374
+ Object.entries(rawData)
1375
+ .filter(
1376
+ ([key, value]) =>
1377
+ key.startsWith('quartz.') === true ||
1378
+ (key.startsWith('DEVICE_') === true && PROTOBUF_RESOURCES.CAMERA.includes(value.value?.device_info?.typeName) === true),
1379
+ )
1380
+ .forEach(([object_key, value]) => {
1381
+ let tempDevice = {};
1382
+ try {
1383
+ if (
1384
+ value?.source === DATA_SOURCE.GOOGLE &&
1385
+ rawData?.[value.value?.device_info?.pairerId?.resourceId] !== undefined &&
1386
+ Array.isArray(value.value?.streaming_protocol?.supportedProtocols) === true &&
1387
+ value.value.streaming_protocol.supportedProtocols.includes('PROTOCOL_WEBRTC') === true &&
1388
+ (value.value?.configuration_done?.deviceReady === true ||
1389
+ value.value?.camera_migration_status?.state?.where === 'MIGRATED_TO_GOOGLE_HOME')
1390
+ ) {
1391
+ tempDevice = processCommonData(
1392
+ object_key,
1314
1393
  {
1315
- fragmentLength: 4000,
1316
- type: this.hap.MediaContainerType.FRAGMENTED_MP4,
1394
+ type: DEVICE_TYPE.CAMERA,
1395
+ model:
1396
+ value.value.device_info.typeName === 'google.resource.NeonQuartzResource' &&
1397
+ value.value?.floodlight_settings === undefined &&
1398
+ value.value?.floodlight_state === undefined
1399
+ ? 'Cam (battery)'
1400
+ : value.value.device_info.typeName === 'google.resource.GreenQuartzResource'
1401
+ ? 'Doorbell (2nd gen, battery)'
1402
+ : value.value.device_info.typeName === 'google.resource.SpencerResource'
1403
+ ? 'Cam (wired)'
1404
+ : value.value.device_info.typeName === 'google.resource.VenusResource'
1405
+ ? 'Doorbell (2nd gen, wired)'
1406
+ : value.value.device_info.typeName === 'nest.resource.NestCamOutdoorResource'
1407
+ ? 'Cam Outdoor (1st gen)'
1408
+ : value.value.device_info.typeName === 'nest.resource.NestCamIndoorResource'
1409
+ ? 'Cam Indoor (1st gen)'
1410
+ : value.value.device_info.typeName === 'nest.resource.NestCamIQResource'
1411
+ ? 'Cam IQ'
1412
+ : value.value.device_info.typeName === 'nest.resource.NestCamIQOutdoorResource'
1413
+ ? 'Cam Outdoor (1st gen)'
1414
+ : value.value.device_info.typeName === 'nest.resource.NestHelloResource'
1415
+ ? 'Doorbell (1st gen, wired)'
1416
+ : value.value.device_info.typeName === 'google.resource.NeonQuartzResource' &&
1417
+ value.value?.floodlight_settings !== undefined &&
1418
+ value.value?.floodlight_state !== undefined
1419
+ ? 'Cam with Floodlight'
1420
+ : 'Camera (unknown)',
1421
+ softwareVersion: value.value.device_identity.softwareVersion,
1422
+ serialNumber: value.value.device_identity.serialNumber,
1423
+ description: String(value.value?.label?.label ?? ''),
1424
+ location: String(
1425
+ [
1426
+ ...Object.values(
1427
+ rawData?.[value.value?.device_info?.pairerId?.resourceId]?.value?.located_annotations?.predefinedWheres || {},
1428
+ ),
1429
+ ...Object.values(
1430
+ rawData?.[value.value?.device_info?.pairerId?.resourceId]?.value?.located_annotations?.customWheres || {},
1431
+ ),
1432
+ ].find((where) => where?.whereId?.resourceId === value.value?.device_located_settings?.whereAnnotationRid?.resourceId)
1433
+ ?.label?.literal ?? '',
1434
+ ),
1435
+ online: value.value?.liveness?.status === 'LIVENESS_DEVICE_STATUS_ONLINE',
1436
+ audio_enabled: value.value?.microphone_settings?.enableMicrophone === true,
1437
+ has_indoor_chime:
1438
+ value.value?.doorbell_indoor_chime_settings?.chimeType === 'CHIME_TYPE_MECHANICAL' ||
1439
+ value.value?.doorbell_indoor_chime_settings?.chimeType === 'CHIME_TYPE_ELECTRONIC',
1440
+ indoor_chime_enabled: value.value?.doorbell_indoor_chime_settings?.chimeEnabled === true,
1441
+ streaming_enabled: value.value?.recording_toggle?.currentCameraState === 'CAMERA_ON',
1442
+ has_microphone: value.value?.microphone_settings?.enableMicrophone === true,
1443
+ has_speaker: value.value?.speaker_volume?.volume !== undefined,
1444
+ has_motion_detection: value.value?.observation_trigger_capabilities?.videoEventTypes?.motion?.value === true,
1445
+ activity_zones: Array.isArray(value.value?.activity_zone_settings?.activityZones)
1446
+ ? value.value.activity_zone_settings.activityZones.map((zone) => ({
1447
+ id: zone.zoneProperties?.zoneId !== undefined ? zone.zoneProperties.zoneId : zone.zoneProperties.internalIndex,
1448
+ name: HomeKitDevice.makeValidHKName(zone.zoneProperties?.name !== undefined ? zone.zoneProperties.name : ''),
1449
+ hidden: false,
1450
+ uri: '',
1451
+ }))
1452
+ : [],
1453
+ alerts: typeof value.value?.alerts === 'object' ? value.value.alerts : [],
1454
+ quiet_time_enabled:
1455
+ isNaN(value.value?.quiet_time_settings?.quietTimeEnds?.seconds) === false &&
1456
+ Number(value.value.quiet_time_settings.quietTimeEnds.seconds) !== 0 &&
1457
+ Math.floor(Date.now() / 1000) < Number(value.value.quiet_time_settings.quietTimeEnds.seconds),
1458
+ camera_type: value.value.device_identity.vendorProductId,
1459
+ streaming_protocols:
1460
+ value.value?.streaming_protocol?.supportedProtocols !== undefined ? value.value.streaming_protocol.supportedProtocols : [],
1461
+ streaming_host:
1462
+ typeof value.value?.streaming_protocol?.directHost?.value === 'string'
1463
+ ? value.value.streaming_protocol.directHost.value
1464
+ : '',
1465
+ has_light: value.value?.floodlight_settings !== undefined && value.value?.floodlight_state !== undefined,
1466
+ light_enabled: value.value?.floodlight_state?.currentState === 'LIGHT_STATE_ON',
1467
+ light_brightness:
1468
+ isNaN(value.value?.floodlight_settings?.brightness) === false
1469
+ ? scaleValue(Number(value.value.floodlight_settings.brightness), 0, 10, 0, 100)
1470
+ : 0,
1471
+ migrating:
1472
+ value.value?.camera_migration_status?.state?.progress !== undefined &&
1473
+ value.value.camera_migration_status.state.progress !== 'PROGRESS_COMPLETE' &&
1474
+ value.value.camera_migration_status.state.progress !== 'PROGRESS_NONE',
1317
1475
  },
1318
- ],
1319
- prebufferLength: 4000, // Seems to always be 4000???
1320
- video: {
1321
- resolutions: controllerOptions.streamingOptions.video.resolutions,
1322
- parameters: {
1323
- profiles: controllerOptions.streamingOptions.video.codec.profiles,
1324
- levels: controllerOptions.streamingOptions.video.codec.levels,
1476
+ config,
1477
+ );
1478
+ if (tempDevice.model.toUpperCase().includes('DOORBELL') === true) {
1479
+ tempDevice.type = DEVICE_TYPE.DOORBELL;
1480
+ }
1481
+ if (tempDevice.model.toUpperCase().includes('FLOODLIGHT') === true) {
1482
+ tempDevice.type = DEVICE_TYPE.FLOODLIGHT;
1483
+ }
1484
+ }
1485
+
1486
+ if (
1487
+ value?.source === DATA_SOURCE.NEST &&
1488
+ rawData?.['where.' + value.value.structure_id] !== undefined &&
1489
+ value.value?.nexus_api_http_server_url !== undefined &&
1490
+ (value.value?.properties?.['cc2migration.overview_state'] === 'NORMAL' ||
1491
+ value.value?.properties?.['cc2migration.overview_state'] === 'REVERSE_MIGRATION_IN_PROGRESS')
1492
+ ) {
1493
+ // We'll only use the Nest API data for cameras which have NOT been migrated to Google Home
1494
+ tempDevice = processCommonData(
1495
+ object_key,
1496
+ {
1497
+ type: DEVICE_TYPE.CAMERA,
1498
+ serialNumber: value.value.serial_number,
1499
+ softwareVersion: value.value.software_version,
1500
+ model: value.value.model.replace(/nest\s*/gi, ''), // Use camera/doorbell model that Nest supplies
1501
+ description: typeof value.value?.description === 'string' ? value.value.description : '',
1502
+ location:
1503
+ rawData?.['where.' + value.value.structure_id]?.value?.wheres?.find((where) => where?.where_id === value.value.where_id)
1504
+ ?.name ?? '',
1505
+ streaming_enabled: value.value.streaming_state.includes('enabled') === true,
1506
+ nexus_api_http_server_url: value.value.nexus_api_http_server_url,
1507
+ online: value.value.streaming_state.includes('offline') === false,
1508
+ audio_enabled: value.value.audio_input_enabled === true,
1509
+ has_indoor_chime: value.value?.capabilities.includes('indoor_chime') === true,
1510
+ indoor_chime_enabled: value.value?.properties['doorbell.indoor_chime.enabled'] === true,
1511
+ has_irled: value.value?.capabilities.includes('irled') === true,
1512
+ irled_enabled: value.value?.properties['irled.state'] !== 'always_off',
1513
+ has_statusled: value.value?.capabilities.includes('statusled') === true,
1514
+ has_video_flip: value.value?.capabilities.includes('video.flip') === true,
1515
+ video_flipped: value.value?.properties['video.flipped'] === true,
1516
+ statusled_brightness:
1517
+ isNaN(value.value?.properties?.['statusled.brightness']) === false
1518
+ ? Number(value.value.properties['statusled.brightness'])
1519
+ : 0,
1520
+ has_microphone: value.value?.capabilities.includes('audio.microphone') === true,
1521
+ has_speaker: value.value?.capabilities.includes('audio.speaker') === true,
1522
+ has_motion_detection: value.value?.capabilities.includes('detectors.on_camera') === true,
1523
+ activity_zones: value.value.activity_zones,
1524
+ alerts: typeof value.value?.alerts === 'object' ? value.value.alerts : [],
1525
+ streaming_protocols: ['PROTOCOL_NEXUSTALK'],
1526
+ streaming_host: value.value.direct_nexustalk_host,
1527
+ quiet_time_enabled: false,
1528
+ camera_type: value.value.camera_type,
1529
+ migrating:
1530
+ value.value?.properties?.['cc2migration.overview_state'] !== undefined &&
1531
+ value.value.properties['cc2migration.overview_state'] !== 'NORMAL',
1325
1532
  },
1326
- type: controllerOptions.streamingOptions.video.codec.type,
1327
- },
1328
- audio: {
1329
- codecs: [
1330
- {
1331
- type: this.hap.AudioRecordingCodecType.AAC_LC,
1332
- samplerate: this.hap.AudioRecordingSamplerate.KHZ_16,
1333
- audioChannel: 1,
1334
- },
1335
- ],
1336
- },
1337
- },
1338
- };
1533
+ config,
1534
+ );
1535
+ if (tempDevice.model.toUpperCase().includes('DOORBELL') === true) {
1536
+ tempDevice.type = DEVICE_TYPE.DOORBELL;
1537
+ }
1538
+ if (tempDevice.model.toUpperCase().includes('FLOODLIGHT') === true) {
1539
+ tempDevice.type = DEVICE_TYPE.FLOODLIGHT;
1540
+ }
1541
+ }
1542
+ // eslint-disable-next-line no-unused-vars
1543
+ } catch (error) {
1544
+ log?.debug?.('Error processing camera data for "%s"', object_key);
1545
+ }
1339
1546
 
1340
- controllerOptions.sensors = {
1341
- motion: typeof this.motionServices?.[1]?.service === 'object' ? this.motionServices[1].service : false,
1342
- };
1343
- }
1547
+ if (
1548
+ Object.entries(tempDevice).length !== 0 &&
1549
+ typeof devices[tempDevice.serialNumber] === 'undefined' &&
1550
+ (deviceType === undefined || (typeof deviceType === 'string' && deviceType !== '' && tempDevice.type === deviceType))
1551
+ ) {
1552
+ let deviceOptions = config?.devices?.find(
1553
+ (device) => device?.serialNumber?.toUpperCase?.() === tempDevice?.serialNumber?.toUpperCase?.(),
1554
+ );
1555
+ // Insert any extra options we've read in from the configuration file for this device
1556
+ tempDevice.eveHistory = config.options.eveHistory === true || deviceOptions?.eveHistory === true;
1557
+ tempDevice.hksv = (config.options?.hksv === true || deviceOptions?.hksv === true) && config.options?.ffmpeg?.valid === true;
1558
+ tempDevice.doorbellCooldown = parseDurationToSeconds(deviceOptions?.doorbellCooldown, { defaultValue: 60, min: 0, max: 300 });
1559
+ tempDevice.motionCooldown = parseDurationToSeconds(deviceOptions?.motionCooldown, { defaultValue: 60, min: 0, max: 300 });
1560
+ tempDevice.personCooldown = parseDurationToSeconds(deviceOptions?.personCooldown, { defaultValue: 120, min: 0, max: 300 });
1561
+ tempDevice.chimeSwitch = deviceOptions?.chimeSwitch === true; // Control 'indoor' chime by switch
1562
+ tempDevice.localAccess = deviceOptions?.localAccess === true; // Stream video from camera/doorbells over the local network rather than via the cloud
1563
+ tempDevice.ffmpeg = {
1564
+ binary: config.options.ffmpeg.binary,
1565
+ valid: config.options.ffmpeg.valid === true,
1566
+ debug: deviceOptions?.ffmpegDebug === true || config.options?.ffmpeg.debug === true,
1567
+ hwaccel:
1568
+ (deviceOptions?.ffmpegHWaccel === true || config.options?.ffmpegHWaccel === true) &&
1569
+ config.options.ffmpeg.valid === true &&
1570
+ config.options.ffmpeg.hwaccel === true,
1571
+ };
1572
+ tempDevice.maxStreams = config.options.hksv === true || deviceOptions?.hksv === true ? 1 : 2;
1573
+ devices[tempDevice.serialNumber] = tempDevice; // Store processed device
1574
+ }
1575
+ });
1344
1576
 
1345
- return controllerOptions;
1346
- }
1577
+ return devices;
1347
1578
  }
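
scaleValue() and parseDurationToSeconds() are imported from ../utils.js and are not part of this diff. Going by their call sites here, scaleValue(v, 0, 10, 0, 100) maps floodlight brightness from a 0-10 range onto 0-100, and parseDurationToSeconds() returns a clamped number of seconds with a fallback default (the real helper may also accept richer duration strings). Plausible implementations, offered only as assumptions about behaviour, could look like:

// Linear rescale of value from [inMin, inMax] onto [outMin, outMax], clamping to the input range first
function scaleValue(value, inMin, inMax, outMin, outMax) {
  let clamped = Math.min(Math.max(value, inMin), inMax);
  return ((clamped - inMin) / (inMax - inMin)) * (outMax - outMin) + outMin;
}

// Parse a duration expressed in seconds (number or numeric string), falling back to a default and clamping to [min, max]
function parseDurationToSeconds(input, { defaultValue, min = 0, max = Infinity } = {}) {
  let seconds = Number(input);
  if (input === undefined || input === null || input === '' || Number.isNaN(seconds) === true) {
    return defaultValue;
  }
  return Math.min(Math.max(seconds, min), max);
}

console.log(scaleValue(5, 0, 10, 0, 100)); // 50
console.log(parseDurationToSeconds('90', { defaultValue: 60, min: 0, max: 300 })); // 90
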