homebridge-nest-accfactory 0.0.6 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +53 -4
- package/README.md +37 -19
- package/dist/HomeKitDevice.js +132 -109
- package/dist/camera.js +344 -263
- package/dist/doorbell.js +5 -3
- package/dist/floodlight.js +3 -3
- package/dist/nexustalk.js +62 -36
- package/dist/protect.js +2 -2
- package/dist/protobuf/googlehome/foyer.proto +216 -160
- package/dist/res/Nest_camera_connecting.h264 +0 -0
- package/dist/res/Nest_camera_off.h264 +0 -0
- package/dist/res/Nest_camera_offline.h264 +0 -0
- package/dist/res/Nest_camera_transfer.h264 +0 -0
- package/dist/streamer.js +78 -37
- package/dist/system.js +1321 -1263
- package/dist/thermostat.js +73 -27
- package/dist/webrtc.js +582 -0
- package/package.json +31 -29
package/dist/webrtc.js
ADDED
|
@@ -0,0 +1,582 @@
|
|
|
1
|
+
// WebRTC
|
|
2
|
+
// Part of homebridge-nest-accfactory
|
|
3
|
+
//
|
|
4
|
+
// Handles connection and data from Google WebRTC systems
|
|
5
|
+
//
|
|
6
|
+
// Code version 27/9/2024
|
|
7
|
+
// Mark Hulskamp
|
|
8
|
+
'use strict';
|
|
9
|
+
|
|
10
|
+
// Define external library requirements
|
|
11
|
+
import protobuf from 'protobufjs';
|
|
12
|
+
import werift from 'werift';
|
|
13
|
+
|
|
14
|
+
// Define nodejs module requirements
|
|
15
|
+
import EventEmitter from 'node:events';
|
|
16
|
+
import http2 from 'node:http2';
|
|
17
|
+
import { Buffer } from 'node:buffer';
|
|
18
|
+
import { setInterval, clearInterval } from 'node:timers';
|
|
19
|
+
import fs from 'node:fs';
|
|
20
|
+
import path from 'node:path';
|
|
21
|
+
import crypto from 'node:crypto';
|
|
22
|
+
import { fileURLToPath } from 'node:url';
|
|
23
|
+
|
|
24
|
+
// Define our modules
|
|
25
|
+
import Streamer from './streamer.js';
|
|
26
|
+
|
|
27
|
+
// Define constants
const EXTENDINTERVAL = 120000; // Send extend command to Google Home Foyer every this period (ms) for active streams
const RTP_PACKET_HEADER_SIZE = 12; // Fixed RTP packet header size in bytes
const RTP_VIDEO_PAYLOAD_TYPE = 102; // RTP payload type we offer for H264 video
const RTP_AUDIO_PAYLOAD_TYPE = 111; // RTP payload type we offer for Opus audio
//const RTP_TALKBACK_PAYLOAD_TYPE = 110;
const USERAGENT = 'Nest/5.78.0 (iOScom.nestlabs.jasper.release) os=18.0'; // User Agent string
const GOOGLEHOMEFOYERPREFIX = 'google.internal.home.foyer.v1.'; // gRPC service/method name prefix for Google Home Foyer APIs
const __dirname = path.dirname(fileURLToPath(import.meta.url)); // Make a defined for JS __dirname

// Blank audio in AAC format, mono channel @48000
// Output as filler when we have no real audio to send
const AACMONO48000BLANK = Buffer.from([
  0xff, 0xf1, 0x4c, 0x40, 0x03, 0x9f, 0xfc, 0xde, 0x02, 0x00, 0x4c, 0x61, 0x76, 0x63, 0x35, 0x39, 0x2e, 0x31, 0x38, 0x2e, 0x31, 0x30, 0x30,
  0x00, 0x02, 0x30, 0x40, 0x0e,
]);

// Blank audio in opus format, stereo channel @48000
//const OPUSSTEREO48000BLANK = Buffer.from([]);
|
|
46
|
+
// WebRTC object
// Streamer implementation that sources camera video/audio from Google's WebRTC
// systems, negotiated via the Google Home Foyer gRPC APIs
export default class WebRTC extends Streamer {
  token = undefined; // oauth2 token used to authorise Google Home Foyer requests
  localAccess = false; // Do we try direct local access to the camera or via Google Home first
  extendTimer = undefined; // Stream extend timer
  pingTimer = undefined; // Google Home Foyer periodic ping
  blankAudio = AACMONO48000BLANK; // Filler audio frame for the Streamer parent
  video = {}; // Video stream details once connected
  audio = {}; // Audio stream details once connected

  // Internal data only for this class
  #protobufFoyer = undefined; // Protobuf for Google Home Foyer
  #googleHomeFoyer = undefined; // HTTP/2 connection to Google Home Foyer APIs
  #id = undefined; // Session ID
  #googleHomeDeviceUUID = undefined; // Normal Nest/Google protobuf device ID translated to a Google Foyer device ID
  #peerConnection = undefined; // werift RTCPeerConnection for the active stream
  #videoTransceiver = undefined; // werift video transceiver (recvonly)
  #audioTransceiver = undefined; // werift audio transceiver (sendrecv, also used for talkback)
+
constructor(deviceData, options) {
|
|
66
|
+
super(deviceData, options);
|
|
67
|
+
|
|
68
|
+
// Load the protobuf for Google Home Foyer. Needed to communicate with camera devices using webrtc
|
|
69
|
+
if (fs.existsSync(path.resolve(__dirname + '/protobuf/googlehome/foyer.proto')) === true) {
|
|
70
|
+
protobuf.util.Long = null;
|
|
71
|
+
protobuf.configure();
|
|
72
|
+
this.#protobufFoyer = protobuf.loadSync(path.resolve(__dirname + '/protobuf/googlehome/foyer.proto'));
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
// Store data we need from the device data passed it
|
|
76
|
+
this.token = deviceData?.apiAccess?.oauth2;
|
|
77
|
+
this.localAccess = deviceData?.localAccess === true;
|
|
78
|
+
|
|
79
|
+
// Set our streamer codec types
|
|
80
|
+
this.codecs = {
|
|
81
|
+
video: 'h264',
|
|
82
|
+
audio: 'opus',
|
|
83
|
+
talk: 'opus',
|
|
84
|
+
};
|
|
85
|
+
|
|
86
|
+
// If specified option to start buffering, kick off
|
|
87
|
+
if (options?.buffer === true) {
|
|
88
|
+
this.startBuffering();
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
  // Class functions

  // Establishes a WebRTC streaming session to the camera via the Google Home Foyer service
  // Resolves the device's Foyer ID (once), sends a view intent, performs the SDP
  // offer/answer exchange, then starts playback handling and the stream-extend timer
  async connect() {
    clearInterval(this.extendTimer); // Stop any extend timer left from a previous session
    this.extendTimer = undefined;
    this.#id = undefined;

    if (this.#googleHomeDeviceUUID === undefined) {
      // We don't have the 'google id' yet for this device, so obtain
      let homeFoyerResponse = await this.#googleHomeFoyerCommand('StructuresService', 'GetHomeGraph', {
        requestId: crypto.randomUUID(),
      });

      // Translate our uuid (DEVICE_xxxxxxxxxx) into the associated 'google id' from the Google Home Foyer
      // We need this id for SOME calls to Google Home Foyer services. Gotta love consistency :-)
      if (homeFoyerResponse?.data?.[0]?.homes !== undefined) {
        Object.values(homeFoyerResponse?.data?.[0]?.homes).forEach((home) => {
          Object.values(home.devices).forEach((device) => {
            if (device?.id?.googleUuid !== undefined && device?.otherIds?.otherThirdPartyId !== undefined) {
              // Test to see if our uuid matches here
              let currentGoogleUuid = device?.id?.googleUuid;
              Object.values(device.otherIds.otherThirdPartyId).forEach((other) => {
                if (other?.id === this.uuid) {
                  this.#googleHomeDeviceUUID = currentGoogleUuid;
                }
              });
            }
          });
        });
      }
    }

    if (this.#googleHomeDeviceUUID !== undefined) {
      // Start setting up connection to camera stream
      this.connected = false; // Starting connection
      let homeFoyerResponse = await this.#googleHomeFoyerCommand('CameraService', 'SendCameraViewIntent', {
        request: {
          googleDeviceId: {
            value: this.#googleHomeDeviceUUID,
          },
          command: 'VIEW_INTENT_START',
        },
      });

      if (homeFoyerResponse.status !== 0) {
        this.connected = undefined;
        this?.log?.debug && this.log.debug('Request to start camera viewing was not accepted for uuid "%s"', this.uuid);
      }

      if (homeFoyerResponse.status === 0) {
        // Setup our WebRTC peer connection for this device
        this.#peerConnection = new werift.RTCPeerConnection({
          iceUseIpv4: true,
          iceUseIpv6: false,
          bundlePolicy: 'max-bundle',
          codecs: {
            audio: [
              new werift.RTCRtpCodecParameters({
                mimeType: 'audio/opus',
                clockRate: 48000,
                channels: 2,
                rtcpFeedback: [{ type: 'transport-cc' }, { type: 'nack' }],
                parameters: 'minptime=10;useinbandfec=1',
                payloadType: RTP_AUDIO_PAYLOAD_TYPE,
              }),
            ],
            video: [
              // H264 Main profile, level 4.0
              new werift.RTCRtpCodecParameters({
                mimeType: 'video/H264',
                clockRate: 90000,
                rtcpFeedback: [
                  { type: 'transport-cc' },
                  { type: 'ccm', parameter: 'fir' },
                  { type: 'nack' },
                  { type: 'nack', parameter: 'pli' },
                  { type: 'goog-remb' },
                ],
                parameters: 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=4de020',
                payloadType: RTP_VIDEO_PAYLOAD_TYPE,
              }),
            ],
          },
          headerExtensions: {
            audio: [werift.useTransportWideCC(), werift.useAudioLevelIndication()],
          },
        });

        this.#peerConnection.createDataChannel('webrtc-datachannel');

        // Audio is sendrecv so the same transceiver can carry talkback audio to the camera
        this.#audioTransceiver = this.#peerConnection.addTransceiver('audio', {
          direction: 'sendrecv',
        });

        this.#videoTransceiver = this.#peerConnection.addTransceiver('video', {
          direction: 'recvonly',
        });

        let webRTCOffer = await this.#peerConnection.createOffer();
        await this.#peerConnection.setLocalDescription(webRTCOffer);

        this?.log?.debug && this.log.debug('Sending WebRTC offer for uuid "%s"', this.uuid);

        homeFoyerResponse = await this.#googleHomeFoyerCommand('CameraService', 'JoinStream', {
          command: 'offer',
          deviceId: this.uuid,
          local: this.localAccess,
          streamContext: 'STREAM_CONTEXT_DEFAULT',
          requestedVideoResolution: 'VIDEO_RESOLUTION_FULL_HIGH',
          sdp: webRTCOffer.sdp,
        });

        if (homeFoyerResponse.status !== 0) {
          this.connected = undefined;
          this?.log?.debug && this.log.debug('WebRTC offer was not agreed with remote for uuid "%s"', this.uuid);
        }

        if (
          homeFoyerResponse.status === 0 &&
          homeFoyerResponse.data?.[0]?.responseType === 'answer' &&
          homeFoyerResponse.data?.[0]?.streamId !== undefined
        ) {
          this?.log?.debug && this.log.debug('WebRTC offer agreed with remote for uuid "%s"', this.uuid);

          this.#audioTransceiver?.onTrack &&
            this.#audioTransceiver.onTrack.subscribe((track) => {
              this.#handlePlaybackBegin(track);

              track.onReceiveRtp.subscribe((rtp) => {
                this.#handlePlaybackPacket(rtp);
              });
            });

          this.#videoTransceiver?.onTrack &&
            this.#videoTransceiver.onTrack.subscribe((track) => {
              this.#handlePlaybackBegin(track);

              track.onReceiveRtp.subscribe((rtp) => {
                this.#handlePlaybackPacket(rtp);
              });
              track.onReceiveRtcp.once(() => {
                // Periodically request a keyframe via RTCP PLI
                // NOTE(review): this interval handle is never stored or cleared, so it keeps
                // firing after close() tears the session down — confirm and consider clearing on teardown
                setInterval(() => {
                  if (this.#videoTransceiver?.receiver !== undefined) {
                    this.#videoTransceiver.receiver.sendRtcpPLI(track.ssrc);
                  }
                }, 2000);
              });
            });

          this.#id = homeFoyerResponse.data[0].streamId;
          this.#peerConnection &&
            (await this.#peerConnection.setRemoteDescription({
              type: 'answer',
              sdp: homeFoyerResponse.data[0].sdp,
            }));

          this?.log?.debug && this.log.debug('Playback started from WebRTC for uuid "%s" with session ID "%s"', this.uuid, this.#id);
          this.connected = true;

          // Monitor connection status. If closed and there are still output streams, re-connect
          // Never seem to get a 'connected' status. Could use that for something?
          this.#peerConnection &&
            this.#peerConnection.connectionStateChange.subscribe((state) => {
              if (state !== 'connected' && state !== 'connecting') {
                this?.log?.debug && this.log.debug('Connection closed to WebRTC for uuid "%s"', this.uuid);
                this.connected = undefined;
                if (this.haveOutputs() === true) {
                  this.connect();
                }
              }
            });

          // Create a timer to extend the active stream every period as defined
          this.extendTimer = setInterval(async () => {
            if (
              this.#googleHomeFoyer !== undefined &&
              this.connected === true &&
              this.#id !== undefined &&
              this.#googleHomeDeviceUUID !== undefined
            ) {
              let homeFoyerResponse = await this.#googleHomeFoyerCommand('CameraService', 'JoinStream', {
                command: 'extend',
                deviceId: this.uuid,
                streamId: this.#id,
              });

              if (homeFoyerResponse?.data?.[0]?.streamExtensionStatus !== 'STATUS_STREAM_EXTENDED') {
                this?.log?.debug && this.log.debug('Error occured while requested stream extentions for uuid "%s"', this.uuid);

                // Do we try to reconnect???
              }
            }
          }, EXTENDINTERVAL);
        }
      }
    }
  }
|
|
288
|
+
|
|
289
|
+
  // Closes the active WebRTC session and tears down all associated state
  // Order matters: stop any in-progress talkback, notify the remote the stream is
  // ending, then destroy the HTTP/2 session and close the peer connection
  async close() {
    if (this.#id !== undefined) {
      if (this.audio?.talking !== undefined) {
        // If we're starting or started talk, stop it
        await this.#googleHomeFoyerCommand('CameraService', 'SendTalkback', {
          googleDeviceId: {
            value: this.#googleHomeDeviceUUID,
          },
          streamId: this.#id,
          command: 'COMMAND_STOP',
        });
      }

      this?.log?.debug && this.log.debug('Notifying remote about closing connection for uuid "%s"', this.uuid);
      await this.#googleHomeFoyerCommand('CameraService', 'JoinStream', {
        command: 'end',
        deviceId: this.uuid,
        streamId: this.#id,
        endStreamReason: 'REASON_USER_EXITED_SESSION',
      });
    }

    if (this.#googleHomeFoyer !== undefined) {
      // The session's 'close' handler clears pingTimer and drops the session object
      this.#googleHomeFoyer.destroy();
    }

    if (typeof this.#peerConnection?.close === 'function') {
      await this.#peerConnection.close();
    }

    // Reset all session state back to 'not connected'
    clearInterval(this.extendTimer);
    this.extendTimer = undefined;
    this.#id = undefined;
    this.#googleHomeFoyer = undefined;
    this.#peerConnection = undefined;
    this.#videoTransceiver = undefined;
    this.#audioTransceiver = undefined;
    this.connected = undefined;
    this.video = {};
    this.audio = {};
  }
|
|
330
|
+
|
|
331
|
+
update(deviceData) {
|
|
332
|
+
if (typeof deviceData !== 'object') {
|
|
333
|
+
return;
|
|
334
|
+
}
|
|
335
|
+
|
|
336
|
+
if (deviceData.apiAccess.oauth2 !== this.token) {
|
|
337
|
+
// OAuth2 token has changed
|
|
338
|
+
this.token = deviceData.apiAccess.oauth2;
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
// Let our parent handle the remaining updates
|
|
342
|
+
super.update(deviceData);
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
  // Sends talkback (two-way) audio to the camera over the sendrecv audio transceiver
  //
  // talkingData - Buffer of encoded audio to transmit; a zero-length Buffer signals
  // that talking has finished for the moment (sends COMMAND_STOP to the remote)
  async talkingAudio(talkingData) {
    if (
      Buffer.isBuffer(talkingData) === false ||
      this.#googleHomeDeviceUUID === undefined ||
      this.#id === undefined ||
      typeof this.#audioTransceiver?.sender?.sendRtp !== 'function'
    ) {
      return;
    }

    if (talkingData.length !== 0) {
      if (this.audio?.talking === undefined) {
        this.audio.talking = false; // Mark as 'connecting' while we ask the remote to start talkback
        let homeFoyerResponse = await this.#googleHomeFoyerCommand('CameraService', 'SendTalkback', {
          googleDeviceId: {
            value: this.#googleHomeDeviceUUID,
          },
          streamId: this.#id,
          command: 'COMMAND_START',
        });

        if (homeFoyerResponse?.status !== 0) {
          this.audio.talking = undefined;
          this?.log?.debug && this.log.debug('Error occured while requesting talkback to start for uuid "%s"', this.uuid);
        }
        if (homeFoyerResponse?.status === 0) {
          this.audio.talking = true;
          this?.log?.debug && this.log.debug('Talking start on uuid "%s"', this.uuid);
        }
      }

      if (this.audio.talking === true) {
        // Output talkdata to stream. We need to generate an RTP packet for data
        let rtpHeader = new werift.RtpHeader();
        rtpHeader.ssrc = this.#audioTransceiver.sender.ssrc;
        rtpHeader.marker = true;
        rtpHeader.payloadOffset = RTP_PACKET_HEADER_SIZE;
        rtpHeader.payloadType = this.audio.id; // As the camera is send/recv, we use the same payload type id as the incoming audio
        // NOTE(review): wall-clock used as RTP timestamp; think the timestamp difference
        // should be 960 per audio packet (20ms @ 48kHz)? — confirm
        rtpHeader.timestamp = Date.now() & 0xffffffff;
        rtpHeader.sequenceNumber = this.audio.talkSquenceNumber++ & 0xffff; // 16bit wrap-around
        let rtpPacket = new werift.RtpPacket(rtpHeader, talkingData);
        this.#audioTransceiver.sender.sendRtp(rtpPacket.serialize());
      }
    }

    if (talkingData.length === 0 && this.audio?.talking === true) {
      // Buffer length of zero, used to signal no more talking data for the moment
      let homeFoyerResponse = await this.#googleHomeFoyerCommand('CameraService', 'SendTalkback', {
        googleDeviceId: {
          value: this.#googleHomeDeviceUUID,
        },
        streamId: this.#id,
        command: 'COMMAND_STOP',
      });
      if (homeFoyerResponse?.status !== 0) {
        this?.log?.debug && this.log.debug('Error occured while requesting talkback to stop for uuid "%s"', this.uuid);
      }
      if (homeFoyerResponse?.status === 0) {
        this?.log?.debug && this.log.debug('Talking ended on uuid "%s"', this.uuid);
      }
      this.audio.talking = undefined;
    }
  }
|
|
408
|
+
|
|
409
|
+
#handlePlaybackBegin(weriftTrack) {
|
|
410
|
+
if (weriftTrack === undefined || typeof weriftTrack !== 'object') {
|
|
411
|
+
return;
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
if (weriftTrack?.kind === 'audio') {
|
|
415
|
+
// Store details about the audio track
|
|
416
|
+
this.audio = {
|
|
417
|
+
id: weriftTrack.codec.payloadType, // Audio track payload type being used
|
|
418
|
+
startTime: Date.now(),
|
|
419
|
+
sampleRate: 48000,
|
|
420
|
+
opus: undefined, // Buffer for processing incoming Opus RTP packets
|
|
421
|
+
talkSquenceNumber: weriftTrack?.sender?.sequenceNumber === undefined ? 0 : weriftTrack.sender.sequenceNumber,
|
|
422
|
+
talking: undefined, // undefined = not connected, false = connecting, true = connected and talking
|
|
423
|
+
};
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
if (weriftTrack?.kind === 'video') {
|
|
427
|
+
// Store details about the video track
|
|
428
|
+
this.video = {
|
|
429
|
+
id: weriftTrack.codec.payloadType, // Video track payload type being used
|
|
430
|
+
startTime: Date.now(),
|
|
431
|
+
sampleRate: 90000,
|
|
432
|
+
h264: undefined, // Buffer for processing incoming fragmented H264 RTP packets
|
|
433
|
+
};
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
async #handlePlaybackPacket(weriftRtpPacket) {
|
|
438
|
+
if (weriftRtpPacket === undefined || typeof weriftRtpPacket !== 'object') {
|
|
439
|
+
return;
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
if (weriftRtpPacket.header.payloadType !== undefined && weriftRtpPacket.header.payloadType === this.video?.id) {
|
|
443
|
+
// Process video RTP packets. Need to re-assemble the H264 NALUs into a single H264 frame we can output
|
|
444
|
+
if (weriftRtpPacket.header.padding === false) {
|
|
445
|
+
this.video.h264 = werift.H264RtpPayload.deSerialize(weriftRtpPacket.payload, this.video.h264?.fragment);
|
|
446
|
+
if (this.video.h264?.payload !== undefined) {
|
|
447
|
+
this.addToOutput('video', this.video.h264.payload);
|
|
448
|
+
this.video.h264 = undefined;
|
|
449
|
+
}
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
if (weriftRtpPacket.header.payloadType !== undefined && weriftRtpPacket.header.payloadType === this.audio?.id) {
|
|
454
|
+
// Process audio RTP packet
|
|
455
|
+
this.audio.opus = werift.OpusRtpPayload.deSerialize(weriftRtpPacket.payload);
|
|
456
|
+
if (this.audio.opus?.payload !== undefined) {
|
|
457
|
+
// Until work out audio, send blank aac
|
|
458
|
+
this.addToOutput('audio', AACMONO48000BLANK);
|
|
459
|
+
|
|
460
|
+
// Decode payload to opus??
|
|
461
|
+
//this.addToOutput('audio', this.audio.opus.payload);
|
|
462
|
+
}
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
|
|
466
|
+
// Need more work in here*
|
|
467
|
+
// <--- error handling
|
|
468
|
+
// <--- timeout?
|
|
469
|
+
async #googleHomeFoyerCommand(service, command, values) {
|
|
470
|
+
if (typeof service !== 'string' || service === '' || typeof command !== 'string' || command === '' || typeof values !== 'object') {
|
|
471
|
+
return;
|
|
472
|
+
}
|
|
473
|
+
|
|
474
|
+
// Attempt to retrieve both 'Request' and 'Reponse' traits for the associated service and command
|
|
475
|
+
let TraitMapRequest = this.#protobufFoyer.lookup(GOOGLEHOMEFOYERPREFIX + command + 'Request');
|
|
476
|
+
let TraitMapResponse = this.#protobufFoyer.lookup(GOOGLEHOMEFOYERPREFIX + command + 'Response');
|
|
477
|
+
let buffer = Buffer.alloc(0);
|
|
478
|
+
let commandResponse = {
|
|
479
|
+
status: undefined,
|
|
480
|
+
message: '',
|
|
481
|
+
data: [],
|
|
482
|
+
};
|
|
483
|
+
|
|
484
|
+
if (TraitMapRequest !== null && TraitMapResponse !== null && this.token !== undefined) {
|
|
485
|
+
if (this.#googleHomeFoyer === undefined || (this.#googleHomeFoyer?.connected === false && this.#googleHomeFoyer?.closed === true)) {
|
|
486
|
+
// No current HTTP/2 connection or current session is closed
|
|
487
|
+
this?.log?.debug && this.log.debug('Connection started to Google Home Foyer');
|
|
488
|
+
this.#googleHomeFoyer = http2.connect('https://googlehomefoyer-pa.googleapis.com');
|
|
489
|
+
|
|
490
|
+
this.#googleHomeFoyer.on('connect', () => {
|
|
491
|
+
this?.log?.debug && this.log.debug('Connection established to Google Home Foyer');
|
|
492
|
+
|
|
493
|
+
clearInterval(this.pingTimer);
|
|
494
|
+
this.pingTimer = setInterval(() => {
|
|
495
|
+
if (this.#googleHomeFoyer !== undefined) {
|
|
496
|
+
// eslint-disable-next-line no-unused-vars
|
|
497
|
+
this.#googleHomeFoyer.ping((error, duration, payload) => {
|
|
498
|
+
// Do we log error to debug?
|
|
499
|
+
});
|
|
500
|
+
}
|
|
501
|
+
}, 60000); // Every minute?
|
|
502
|
+
});
|
|
503
|
+
|
|
504
|
+
// eslint-disable-next-line no-unused-vars
|
|
505
|
+
this.#googleHomeFoyer.on('goaway', (errorCode, lastStreamID, opaqueData) => {
|
|
506
|
+
//console.log('http2 goaway', errorCode);
|
|
507
|
+
});
|
|
508
|
+
|
|
509
|
+
// eslint-disable-next-line no-unused-vars
|
|
510
|
+
this.#googleHomeFoyer.on('error', (error) => {
|
|
511
|
+
//console.log('http2 error', error);
|
|
512
|
+
// Close??
|
|
513
|
+
});
|
|
514
|
+
|
|
515
|
+
this.#googleHomeFoyer.on('close', () => {
|
|
516
|
+
clearInterval(this.pingTimer);
|
|
517
|
+
this.pingTimer = undefined;
|
|
518
|
+
this.#googleHomeFoyer = undefined;
|
|
519
|
+
this?.log?.debug && this.log.debug('Connection closed to Google Home Foyer');
|
|
520
|
+
});
|
|
521
|
+
}
|
|
522
|
+
|
|
523
|
+
let request = this.#googleHomeFoyer.request({
|
|
524
|
+
':method': 'post',
|
|
525
|
+
':path': '/' + GOOGLEHOMEFOYERPREFIX + service + '/' + command,
|
|
526
|
+
authorization: 'Bearer ' + this.token,
|
|
527
|
+
'content-type': 'application/grpc',
|
|
528
|
+
'user-agent': USERAGENT,
|
|
529
|
+
te: 'trailers',
|
|
530
|
+
'request-id': crypto.randomUUID(),
|
|
531
|
+
'grpc-timeout': '10S',
|
|
532
|
+
});
|
|
533
|
+
|
|
534
|
+
request.on('data', (data) => {
|
|
535
|
+
buffer = Buffer.concat([buffer, data]);
|
|
536
|
+
while (buffer.length >= 5) {
|
|
537
|
+
let headerSize = 5;
|
|
538
|
+
let dataSize = buffer.readUInt32BE(1);
|
|
539
|
+
if (buffer.length < headerSize + dataSize) {
|
|
540
|
+
// We don't have enough data in the buffer yet to process the data
|
|
541
|
+
// so, exit loop and await more data
|
|
542
|
+
break;
|
|
543
|
+
}
|
|
544
|
+
|
|
545
|
+
commandResponse.data.push(TraitMapResponse.decode(buffer.subarray(headerSize, headerSize + dataSize)).toJSON());
|
|
546
|
+
buffer = buffer.subarray(headerSize + dataSize);
|
|
547
|
+
}
|
|
548
|
+
});
|
|
549
|
+
|
|
550
|
+
request.on('trailers', (headers) => {
|
|
551
|
+
if (isNaN(Number(headers?.['grpc-status'])) === false) {
|
|
552
|
+
commandResponse.status = Number(headers['grpc-status']);
|
|
553
|
+
}
|
|
554
|
+
if (headers?.['grpc-message'] !== undefined) {
|
|
555
|
+
commandResponse.message = headers['grpc-message'];
|
|
556
|
+
}
|
|
557
|
+
});
|
|
558
|
+
|
|
559
|
+
request.on('error', (error) => {
|
|
560
|
+
commandResponse.status = error.code;
|
|
561
|
+
commandResponse.message = error.message;
|
|
562
|
+
commandResponse.data = [];
|
|
563
|
+
request.close();
|
|
564
|
+
});
|
|
565
|
+
|
|
566
|
+
if (request !== undefined && request?.closed === false && request?.destroyed === false) {
|
|
567
|
+
// Encode our request values, prefix with header (size of data), then send
|
|
568
|
+
let encodedData = TraitMapRequest.encode(TraitMapRequest.fromObject(values)).finish();
|
|
569
|
+
let header = Buffer.alloc(5);
|
|
570
|
+
header.writeUInt32BE(encodedData.length, 1);
|
|
571
|
+
request.write(Buffer.concat([header, encodedData]));
|
|
572
|
+
request.end();
|
|
573
|
+
|
|
574
|
+
await EventEmitter.once(request, 'close');
|
|
575
|
+
}
|
|
576
|
+
|
|
577
|
+
request.destroy(); // No longer need this request
|
|
578
|
+
}
|
|
579
|
+
|
|
580
|
+
return commandResponse;
|
|
581
|
+
}
|
|
582
|
+
}
|
package/package.json
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
{
|
|
2
|
-
"displayName": "Nest Accfactory",
|
|
3
2
|
"name": "homebridge-nest-accfactory",
|
|
4
|
-
"
|
|
5
|
-
"
|
|
3
|
+
"displayName": "Homebridge Nest Accfactory",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"version": "0.2.1",
|
|
6
6
|
"description": "Homebridge support for Nest/Google devices including HomeKit Secure Video (HKSV) support for doorbells and cameras",
|
|
7
|
-
"license": "Apache-2.0",
|
|
8
7
|
"author": "n0rt0nthec4t",
|
|
9
|
-
"
|
|
8
|
+
"license": "Apache-2.0",
|
|
9
|
+
"homepage": "https://github.com/n0rt0nthec4t/homebridge-nest-accfactory/#readme",
|
|
10
10
|
"repository": {
|
|
11
11
|
"type": "git",
|
|
12
12
|
"url": "git+https://github.com/n0rt0nthec4t/homebridge-nest-accfactory.git"
|
|
@@ -14,26 +14,8 @@
|
|
|
14
14
|
"bugs": {
|
|
15
15
|
"url": "https://github.com/n0rt0nthec4t/homebridge-nest-accfactory/issues"
|
|
16
16
|
},
|
|
17
|
-
"engines": {
|
|
18
|
-
"node": "^18.20.4 || ^20.16.0 || ^22.6.0",
|
|
19
|
-
"homebridge": "^1.8.0 || ^2.0.0-beta.0"
|
|
20
|
-
},
|
|
21
|
-
"main": "dist/index.js",
|
|
22
|
-
"files": [
|
|
23
|
-
"LICENSE",
|
|
24
|
-
"dist/**/*",
|
|
25
|
-
"config.schema.json",
|
|
26
|
-
"README.md",
|
|
27
|
-
"CHANGELOG.md"
|
|
28
|
-
],
|
|
29
|
-
"scripts": {
|
|
30
|
-
"clean": "rimraf ./dist",
|
|
31
|
-
"format": "prettier --write src/**/*.js",
|
|
32
|
-
"lint": "eslint src/**/*.js --max-warnings=100",
|
|
33
|
-
"build": "npm run clean && copyfiles -u 1 src/*.js dist && copyfiles -u 1 src/res/*.h264 dist && copyfiles -u 1 src/res/*.jpg dist && copyfiles -u 1 'src/protobuf/**/*.proto' dist",
|
|
34
|
-
"prepublishOnly": "npm run lint && npm run build"
|
|
35
|
-
},
|
|
36
17
|
"keywords": [
|
|
18
|
+
"homekit",
|
|
37
19
|
"homebridge-plugin",
|
|
38
20
|
"homebridge",
|
|
39
21
|
"hksv",
|
|
@@ -48,20 +30,40 @@
|
|
|
48
30
|
"sensor",
|
|
49
31
|
"floodlight"
|
|
50
32
|
],
|
|
33
|
+
"main": "dist/index.js",
|
|
34
|
+
"engines": {
|
|
35
|
+
"node": "^20.17.0 || ^22.9.0",
|
|
36
|
+
"homebridge": "^1.8.0 || ^2.0.0-beta.0"
|
|
37
|
+
},
|
|
38
|
+
"files": [
|
|
39
|
+
"LICENSE",
|
|
40
|
+
"dist/**/*",
|
|
41
|
+
"config.schema.json",
|
|
42
|
+
"README.md",
|
|
43
|
+
"CHANGELOG.md"
|
|
44
|
+
],
|
|
45
|
+
"scripts": {
|
|
46
|
+
"clean": "rimraf ./dist*",
|
|
47
|
+
"format": "prettier --write src/*.js src/**/*.js",
|
|
48
|
+
"lint": "eslint src/*.js src/**/*.js --max-warnings=20",
|
|
49
|
+
"build": "npm run clean && copyfiles -u 1 src/*.js dist && copyfiles -u 1 src/res/*.h264 dist && copyfiles -u 1 src/res/*.jpg dist && copyfiles -u 1 'src/protobuf/**/*.proto' dist",
|
|
50
|
+
"prepublishOnly": "npm run lint && npm run build"
|
|
51
|
+
},
|
|
51
52
|
"devDependencies": {
|
|
52
|
-
"@eslint/js": "^9.
|
|
53
|
+
"@eslint/js": "^9.11.1",
|
|
53
54
|
"@stylistic/eslint-plugin": "^2.8.0",
|
|
54
|
-
"@types/node": "^
|
|
55
|
-
"@typescript-eslint/parser": "^8.
|
|
55
|
+
"@types/node": "^22.7.4",
|
|
56
|
+
"@typescript-eslint/parser": "^8.8.0",
|
|
56
57
|
"homebridge": "^2.0.0-beta.0",
|
|
57
58
|
"copyfiles": "^2.4.1",
|
|
58
|
-
"eslint": "^9.
|
|
59
|
+
"eslint": "^9.11.1",
|
|
59
60
|
"prettier": "^3.3.3",
|
|
60
61
|
"prettier-eslint": "^16.3.0",
|
|
61
62
|
"rimraf": "^6.0.1"
|
|
62
63
|
},
|
|
63
64
|
"dependencies": {
|
|
64
65
|
"protobufjs": "^7.4.0",
|
|
65
|
-
"ws": "^8.18.0"
|
|
66
|
+
"ws": "^8.18.0",
|
|
67
|
+
"werift": "^0.20.0"
|
|
66
68
|
}
|
|
67
69
|
}
|