@camstack/addon-pipeline 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dist/audio-analyzer/index.js +723 -0
  2. package/dist/audio-analyzer/index.js.map +1 -0
  3. package/dist/audio-analyzer/index.mjs +683 -0
  4. package/dist/audio-analyzer/index.mjs.map +1 -0
  5. package/dist/audio-codec-nodeav/index.js +467 -0
  6. package/dist/audio-codec-nodeav/index.js.map +1 -0
  7. package/dist/audio-codec-nodeav/index.mjs +467 -0
  8. package/dist/audio-codec-nodeav/index.mjs.map +1 -0
  9. package/dist/decoder-nodeav/index.js +929 -0
  10. package/dist/decoder-nodeav/index.js.map +1 -0
  11. package/dist/decoder-nodeav/index.mjs +907 -0
  12. package/dist/decoder-nodeav/index.mjs.map +1 -0
  13. package/dist/detection-pipeline/index.js +5766 -0
  14. package/dist/detection-pipeline/index.js.map +1 -0
  15. package/dist/detection-pipeline/index.mjs +5725 -0
  16. package/dist/detection-pipeline/index.mjs.map +1 -0
  17. package/dist/index-D_cl0Qqb.js +5791 -0
  18. package/dist/index-D_cl0Qqb.js.map +1 -0
  19. package/dist/index-UbcdLS7a.mjs +5790 -0
  20. package/dist/index-UbcdLS7a.mjs.map +1 -0
  21. package/dist/motion-wasm/index.js +476 -0
  22. package/dist/motion-wasm/index.js.map +1 -0
  23. package/dist/motion-wasm/index.mjs +454 -0
  24. package/dist/motion-wasm/index.mjs.map +1 -0
  25. package/dist/pipeline-runner/index.js +1669 -0
  26. package/dist/pipeline-runner/index.js.map +1 -0
  27. package/dist/pipeline-runner/index.mjs +1647 -0
  28. package/dist/pipeline-runner/index.mjs.map +1 -0
  29. package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/StreamBrokerPanel.d.ts +21 -0
  30. package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/index.d.ts +13 -0
  31. package/dist/stream-broker/@mf-types/widgets.d.ts +2 -0
  32. package/dist/stream-broker/@mf-types.d.ts +3 -0
  33. package/dist/stream-broker/@mf-types.zip +0 -0
  34. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_sdk__loadShare__.mjs-h5aXOPSA.mjs +12 -0
  35. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_types__loadShare__.mjs-C-URP6DW.mjs +17 -0
  36. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_ui_mf_2_library__loadShare__.mjs-69eEmXwl.mjs +20 -0
  37. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_tanstack_mf_1_react_mf_2_query__loadShare__.mjs-U1EUeEPs.mjs +104 -0
  38. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_client__loadShare__.mjs-DeouEaSs.mjs +85 -0
  39. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_react_mf_2_query__loadShare__.mjs-DHUwjbb9.mjs +62 -0
  40. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs-DePVYdid.mjs +85 -0
  41. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs_commonjs-proxy-CBlCGyx5.mjs +29 -0
  42. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_1_jsx_mf_2_runtime__loadShare__.mjs-gBEZsQrp.mjs +36 -0
  43. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs-DYEKzzY-.mjs +45 -0
  44. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs_commonjs-proxy-DZchZKbW.mjs +6 -0
  45. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom_mf_1_client__loadShare__.mjs-DICOtMTl.mjs +34 -0
  46. package/dist/stream-broker/_stub.js +752 -0
  47. package/dist/stream-broker/_virtual_mf-localSharedImportMap___mfe_internal__addon_stream_broker_widgets-D6o1e2ed.mjs +156 -0
  48. package/dist/stream-broker/client-BK73l2KT.mjs +10063 -0
  49. package/dist/stream-broker/getErrorShape-BPSzUA7W-TlK8ipWe.mjs +211 -0
  50. package/dist/stream-broker/hostInit-RCeroTVY.mjs +168 -0
  51. package/dist/stream-broker/index-BYclbfM0.mjs +15806 -0
  52. package/dist/stream-broker/index-BhXZh4lQ.mjs +1617 -0
  53. package/dist/stream-broker/index-BxHaCH3N.mjs +725 -0
  54. package/dist/stream-broker/index-D2-K2YJ7.mjs +19268 -0
  55. package/dist/stream-broker/index-IUYKHbxX.mjs +185 -0
  56. package/dist/stream-broker/index-Ss9m7Jum.mjs +2603 -0
  57. package/dist/stream-broker/index-ns1fRD30.mjs +435 -0
  58. package/dist/stream-broker/index-xncRG7-x.mjs +2713 -0
  59. package/dist/stream-broker/index.js +11171 -0
  60. package/dist/stream-broker/index.js.map +1 -0
  61. package/dist/stream-broker/index.mjs +11130 -0
  62. package/dist/stream-broker/index.mjs.map +1 -0
  63. package/dist/stream-broker/jsx-runtime-ZdY5pIZz.mjs +55 -0
  64. package/dist/stream-broker/remoteEntry.js +2973 -0
  65. package/dist/stream-broker/virtualExposes-pCd777Rp.mjs +42 -0
  66. package/package.json +258 -0
  67. package/python/__pycache__/inference_pool.cpython-313.pyc +0 -0
  68. package/python/inference_pool.py +1088 -0
  69. package/python/postprocessors/__init__.py +24 -0
  70. package/python/postprocessors/__pycache__/__init__.cpython-312.pyc +0 -0
  71. package/python/postprocessors/__pycache__/__init__.cpython-313.pyc +0 -0
  72. package/python/postprocessors/__pycache__/_safety.cpython-313.pyc +0 -0
  73. package/python/postprocessors/__pycache__/arcface.cpython-312.pyc +0 -0
  74. package/python/postprocessors/__pycache__/arcface.cpython-313.pyc +0 -0
  75. package/python/postprocessors/__pycache__/ctc.cpython-312.pyc +0 -0
  76. package/python/postprocessors/__pycache__/ctc.cpython-313.pyc +0 -0
  77. package/python/postprocessors/__pycache__/saliency.cpython-312.pyc +0 -0
  78. package/python/postprocessors/__pycache__/saliency.cpython-313.pyc +0 -0
  79. package/python/postprocessors/__pycache__/scrfd.cpython-312.pyc +0 -0
  80. package/python/postprocessors/__pycache__/scrfd.cpython-313.pyc +0 -0
  81. package/python/postprocessors/__pycache__/softmax.cpython-312.pyc +0 -0
  82. package/python/postprocessors/__pycache__/softmax.cpython-313.pyc +0 -0
  83. package/python/postprocessors/__pycache__/yamnet.cpython-312.pyc +0 -0
  84. package/python/postprocessors/__pycache__/yamnet.cpython-313.pyc +0 -0
  85. package/python/postprocessors/__pycache__/yolo.cpython-312.pyc +0 -0
  86. package/python/postprocessors/__pycache__/yolo.cpython-313.pyc +0 -0
  87. package/python/postprocessors/__pycache__/yolo_seg.cpython-312.pyc +0 -0
  88. package/python/postprocessors/__pycache__/yolo_seg.cpython-313.pyc +0 -0
  89. package/python/postprocessors/arcface.py +31 -0
  90. package/python/postprocessors/ctc.py +68 -0
  91. package/python/postprocessors/saliency.py +44 -0
  92. package/python/postprocessors/scrfd.py +212 -0
  93. package/python/postprocessors/softmax.py +43 -0
  94. package/python/postprocessors/yamnet.py +41 -0
  95. package/python/postprocessors/yolo.py +278 -0
  96. package/python/postprocessors/yolo_seg.py +247 -0
  97. package/python/requirements-coreml.txt +4 -0
  98. package/python/requirements-onnxruntime.txt +3 -0
  99. package/python/requirements-openvino.txt +3 -0
  100. package/python/requirements.txt +9 -0
  101. package/swift/audio-analyzer/apple-sound-classifier +0 -0
  102. package/swift/audio-analyzer/apple-sound-classifier.swift +213 -0
  103. package/swift/detection-pipeline/apple-sound-classifier +0 -0
  104. package/swift/detection-pipeline/apple-sound-classifier.swift +196 -0
  105. package/wasm/assembly/index.ts +290 -0
  106. package/wasm/assembly/tsconfig.json +4 -0
  107. package/wasm/motion.wasm +0 -0
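The bulk of the diff below is the pipeline-runner bundle (item 25, package/dist/pipeline-runner/index.js, +1669 lines). Its scheduling core rests on two small primitives: a latest-only frame queue that overwrites stale frames and counts each overwrite as a drop, and a counting semaphore that caps concurrent inference. The TypeScript sketch that follows is editorial orientation only and is not part of the published package; the names LatestOnlyQueue, runOnce and the infer callback are invented stand-ins for the bundle's FrameQueue, Semaphore and processFrame handler.

type Frame = { data: Uint8Array; width: number; height: number; timestamp: number };

class LatestOnlyQueue<T> {
  private latest: T | null = null;
  dropped = 0;

  enqueue(item: T): void {
    // Overwriting an unconsumed item counts as a dropped frame.
    if (this.latest !== null) this.dropped++;
    this.latest = item;
  }

  dequeue(): T | undefined {
    const item = this.latest ?? undefined;
    this.latest = null;
    return item;
  }
}

class Semaphore {
  private waiters: Array<() => void> = [];
  constructor(private available: number) {}

  async acquire(): Promise<() => void> {
    if (this.available > 0) {
      this.available--;
      return () => this.release();
    }
    // No permit free: park the caller until release() wakes it.
    return new Promise((resolve) =>
      this.waiters.push(() => {
        this.available--;
        resolve(() => this.release());
      })
    );
  }

  private release(): void {
    this.available++;
    this.waiters.shift()?.();
  }
}

// One scheduling step: take the freshest frame (if any) and run inference
// under the concurrency cap. `infer` is a hypothetical callback standing in
// for the addon's processFrame handler.
async function runOnce(
  queue: LatestOnlyQueue<Frame>,
  sem: Semaphore,
  infer: (frame: Frame) => Promise<unknown>
): Promise<void> {
  const frame = queue.dequeue();
  if (!frame) return;
  const release = await sem.acquire();
  try {
    await infer(frame);
  } finally {
    release();
  }
}

In the published bundle the same pattern appears as FrameQueue, Semaphore and PipelineRunner.processWithSemaphore further down in this diff.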
package/dist/pipeline-runner/index.js
@@ -0,0 +1,1669 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __copyProps = (to, from, except, desc) => {
9
+ if (from && typeof from === "object" || typeof from === "function") {
10
+ for (let key of __getOwnPropNames(from))
11
+ if (!__hasOwnProp.call(to, key) && key !== except)
12
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
13
+ }
14
+ return to;
15
+ };
16
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
17
+ // If the importer is in node compatibility mode or this is not an ESM
18
+ // file that has been converted to a CommonJS file using a Babel-
19
+ // compatible transform (i.e. "__esModule" has not been set), then set
20
+ // "default" to the CommonJS "module.exports" for node compatibility.
21
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
22
+ mod
23
+ ));
24
+ Object.defineProperties(exports, { __esModule: { value: true }, [Symbol.toStringTag]: { value: "Module" } });
25
+ const types = require("@camstack/types");
26
+ const zod = require("zod");
27
+ class FrameQueue {
28
+ constructor(maxSize) {
29
+ this.maxSize = maxSize;
30
+ }
31
+ latest = null;
32
+ _droppedFrames = 0;
33
+ enqueue(frame) {
34
+ if (this.latest !== null) {
35
+ this._droppedFrames++;
36
+ }
37
+ this.latest = frame;
38
+ }
39
+ dequeue() {
40
+ const frame = this.latest ?? void 0;
41
+ this.latest = null;
42
+ return frame;
43
+ }
44
+ get size() {
45
+ return this.latest !== null ? 1 : 0;
46
+ }
47
+ get droppedFrames() {
48
+ return this._droppedFrames;
49
+ }
50
+ clear() {
51
+ this.latest = null;
52
+ }
53
+ }
54
+ class Semaphore {
55
+ _concurrency;
56
+ _available;
57
+ waiters = [];
58
+ constructor(concurrency) {
59
+ this._concurrency = concurrency;
60
+ this._available = concurrency;
61
+ }
62
+ get concurrency() {
63
+ return this._concurrency;
64
+ }
65
+ get available() {
66
+ return this._available;
67
+ }
68
+ /**
69
+ * Change the concurrency limit at runtime. Growing wakes as many
70
+ * pending waiters as possible without exceeding the new headroom;
71
+ * shrinking simply caps `_available` to `max(0, _available + delta)`.
72
+ * In-flight permits are never revoked — the excess will drain
73
+ * naturally as existing callers release.
74
+ */
75
+ resize(newConcurrency) {
76
+ if (newConcurrency < 1) throw new Error("Semaphore: concurrency must be >= 1");
77
+ const delta = newConcurrency - this._concurrency;
78
+ this._concurrency = newConcurrency;
79
+ this._available = Math.max(0, this._available + delta);
80
+ while (this._available > 0 && this.waiters.length > 0) {
81
+ const next = this.waiters.shift();
82
+ if (next) next();
83
+ }
84
+ }
85
+ async acquire() {
86
+ if (this._available > 0) {
87
+ this._available--;
88
+ return () => this.release();
89
+ }
90
+ return new Promise((resolve) => {
91
+ this.waiters.push(() => {
92
+ this._available--;
93
+ resolve(() => this.release());
94
+ });
95
+ });
96
+ }
97
+ release() {
98
+ this._available++;
99
+ const next = this.waiters.shift();
100
+ if (next) next();
101
+ }
102
+ }
103
+ const REPORT_INTERVAL_MS = 1e4;
104
+ class PipelineTimingSampler {
105
+ detSamples = /* @__PURE__ */ new Map();
106
+ motSamples = /* @__PURE__ */ new Map();
107
+ audioSamples = /* @__PURE__ */ new Map();
108
+ droppedFrames = 0;
109
+ reportTimer = null;
110
+ log = null;
111
+ runtimeInfo = {};
112
+ setLogger(logger) {
113
+ this.log = logger;
114
+ }
115
+ start() {
116
+ if (this.reportTimer) return;
117
+ this.reportTimer = setInterval(() => this.report(), REPORT_INTERVAL_MS);
118
+ }
119
+ stop() {
120
+ if (this.reportTimer) {
121
+ clearInterval(this.reportTimer);
122
+ this.reportTimer = null;
123
+ }
124
+ }
125
+ addSample(deviceId, s) {
126
+ if (!this.detSamples.has(deviceId)) this.detSamples.set(deviceId, []);
127
+ this.detSamples.get(deviceId).push(s);
128
+ }
129
+ addMotionSample(deviceId, ms) {
130
+ if (!this.motSamples.has(deviceId)) this.motSamples.set(deviceId, []);
131
+ this.motSamples.get(deviceId).push(ms);
132
+ }
133
+ addAudioSample(deviceId, s) {
134
+ if (!this.audioSamples.has(deviceId)) this.audioSamples.set(deviceId, []);
135
+ this.audioSamples.get(deviceId).push(s);
136
+ }
137
+ addDrop() {
138
+ this.droppedFrames++;
139
+ }
140
+ report() {
141
+ if (!this.log) return;
142
+ const dropped = this.droppedFrames;
143
+ this.droppedFrames = 0;
144
+ const avg = (arr) => arr.length > 0 ? Math.round(arr.reduce((a, b) => a + b, 0) / arr.length) : 0;
145
+ const max = (arr) => arr.length > 0 ? Math.round(Math.max(...arr)) : 0;
146
+ const p95 = (arr) => {
147
+ if (arr.length === 0) return 0;
148
+ const sorted = [...arr].sort((a, b) => a - b);
149
+ return Math.round(sorted[Math.floor(sorted.length * 0.95)] ?? sorted[sorted.length - 1]);
150
+ };
151
+ const rt = this.runtimeInfo;
152
+ for (const [deviceId, det] of this.detSamples) {
153
+ if (det.length === 0) continue;
154
+ const e2e = det.map((s) => s.endToEnd);
155
+ const inf = det.map((s) => s.inference);
156
+ const totalDet = det.reduce((s, d) => s + d.detections, 0);
157
+ this.log.info(
158
+ "pipeline stats",
159
+ {
160
+ tags: { deviceId },
161
+ meta: {
162
+ frames: det.length,
163
+ intervalSec: REPORT_INTERVAL_MS / 1e3,
164
+ e2e: { avg: avg(e2e), p95: p95(e2e), max: max(e2e) },
165
+ inference: { avg: avg(inf), p95: p95(inf) },
166
+ detections: totalDet,
167
+ dropped,
168
+ pipelineRuntime: rt.pipelineRuntime ?? null,
169
+ pipelineModels: rt.pipelineModels ?? null
170
+ }
171
+ }
172
+ );
173
+ }
174
+ this.detSamples.clear();
175
+ for (const [deviceId, mot] of this.motSamples) {
176
+ if (mot.length === 0) continue;
177
+ this.log.info(
178
+ "motion stats",
179
+ {
180
+ tags: { deviceId },
181
+ meta: {
182
+ frames: mot.length,
183
+ intervalSec: REPORT_INTERVAL_MS / 1e3,
184
+ avg: avg(mot),
185
+ p95: p95(mot),
186
+ max: max(mot)
187
+ // motionAddon: rt.motionAddon ?? null,
188
+ }
189
+ }
190
+ );
191
+ }
192
+ this.motSamples.clear();
193
+ for (const [deviceId, aud] of this.audioSamples) {
194
+ if (aud.length === 0) continue;
195
+ const classifyTimes = aud.filter((a) => a.classifyMs > 0).map((a) => a.classifyMs);
196
+ const classified = aud.filter((a) => a.topLabel !== null);
197
+ const topLabels = /* @__PURE__ */ new Map();
198
+ for (const a of classified) {
199
+ if (a.topLabel) topLabels.set(a.topLabel, (topLabels.get(a.topLabel) ?? 0) + 1);
200
+ }
201
+ const topSummary = [...topLabels.entries()].sort((a, b) => b[1] - a[1]).slice(0, 3).map(([l, c]) => `${l}×${c}`).join(", ");
202
+ const avgDbfs = avg(aud.map((a) => Math.round(a.dbfs)));
203
+ this.log.info(
204
+ "audio stats",
205
+ {
206
+ tags: { deviceId },
207
+ meta: {
208
+ chunks: aud.length,
209
+ intervalSec: REPORT_INTERVAL_MS / 1e3,
210
+ classified: classified.length,
211
+ classifyAvgMs: classifyTimes.length > 0 ? avg(classifyTimes) : 0,
212
+ avgDbfs,
213
+ topLabels: topSummary,
214
+ audioEngine: rt.audioEngine ?? null
215
+ }
216
+ }
217
+ );
218
+ }
219
+ this.audioSamples.clear();
220
+ }
221
+ }
222
+ const DEFAULT_MOTION_COOLDOWN_MS = 3e4;
223
+ function toFrameInput$1(frame) {
224
+ return {
225
+ data: frame.data,
226
+ width: frame.width,
227
+ height: frame.height,
228
+ format: frame.format,
229
+ timestamp: frame.timestamp
230
+ };
231
+ }
232
+ class PipelineRunner {
233
+ // Config is mutable (not `readonly`) because `updateLimits()` hot-reloads
234
+ // the four tuning fields when the pipeline-runner addon's
235
+ // `updateAddonSettings` is invoked via the new three-level settings API.
236
+ // The callbacks (`processFrame`, `analyzeMotion`) are invariants captured
237
+ // at construction and never changed.
238
+ config;
239
+ cameras = /* @__PURE__ */ new Map();
240
+ semaphore;
241
+ resultCallbacks = [];
242
+ defaultRoundRobinKeys = [];
243
+ defaultRoundRobinIndex = 0;
244
+ intervalHandle = null;
245
+ detectionStreamHandler = null;
246
+ logger;
247
+ timingSampler = new PipelineTimingSampler();
248
+ constructor(config) {
249
+ this.config = config;
250
+ this.logger = config.logger;
251
+ this.semaphore = new Semaphore(config.maxConcurrentInferences);
252
+ }
253
+ /**
254
+ * Hot-reload the four tuning fields without tearing down the runner.
255
+ * - `maxConcurrentInferences`: resized on the live semaphore; in-flight
256
+ * permits are preserved, new capacity is available immediately.
257
+ * - `maxQueueDepth`: new `FrameQueue`s created from this point on use
258
+ * the updated ceiling. Existing per-camera queues are not resized
259
+ * (the FrameQueue implementation is latest-only and ignores maxSize
260
+ * anyway — see `frame-queue.ts` — so the field is effectively a
261
+ * metadata hint for observability).
262
+ * - `targetLoadPercent` / `minThrottledFps`: stored for future
263
+ * throttling logic (not yet consumed in the current runner body).
264
+ *
265
+ * Only keys present in the patch are overwritten; unspecified keys
266
+ * retain their current value. Any illegal combination (e.g.
267
+ * concurrency < 1) throws and leaves the runner unchanged.
268
+ */
269
+ updateLimits(patch) {
270
+ const next = {
271
+ ...this.config,
272
+ maxQueueDepth: patch.maxQueueDepth ?? this.config.maxQueueDepth,
273
+ maxConcurrentInferences: patch.maxConcurrentInferences ?? this.config.maxConcurrentInferences,
274
+ targetLoadPercent: patch.targetLoadPercent ?? this.config.targetLoadPercent,
275
+ minThrottledFps: patch.minThrottledFps ?? this.config.minThrottledFps
276
+ };
277
+ if (next.maxConcurrentInferences !== this.config.maxConcurrentInferences) {
278
+ this.semaphore.resize(next.maxConcurrentInferences);
279
+ }
280
+ this.config = next;
281
+ }
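// (Editorial illustration, not part of the published file.) Example of the
// patch semantics documented above:
//   runner.updateLimits({ maxConcurrentInferences: 8 });
//   // -> semaphore.resize(8); maxQueueDepth, targetLoadPercent and
//   //    minThrottledFps keep their current values.
//   runner.updateLimits({ maxConcurrentInferences: 0 });
//   // -> Semaphore.resize throws ("concurrency must be >= 1") and
//   //    this.config is left unchanged.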
282
+ /** Read the current tuning fields for diagnostics / tests. */
283
+ getLimits() {
284
+ return {
285
+ maxQueueDepth: this.config.maxQueueDepth,
286
+ maxConcurrentInferences: this.config.maxConcurrentInferences,
287
+ targetLoadPercent: this.config.targetLoadPercent,
288
+ minThrottledFps: this.config.minThrottledFps
289
+ };
290
+ }
291
+ /** Set a handler called when the runner needs to subscribe/unsubscribe the detection stream. */
292
+ onDetectionStreamChange(handler) {
293
+ this.detectionStreamHandler = handler;
294
+ }
295
+ registerCamera(deviceId, registration) {
296
+ const motionQueue = new FrameQueue(this.config.maxQueueDepth);
297
+ const detectionQueue = new FrameQueue(this.config.maxQueueDepth);
298
+ const initialPhase = registration.detectionMode === "disabled" ? "idle" : registration.detectionMode === "always-on" ? "active" : "watching";
299
+ const state = {
300
+ registration,
301
+ motionQueue,
302
+ detectionQueue,
303
+ inferenceTimes: [],
304
+ processedCount: 0,
305
+ startTime: Date.now(),
306
+ phase: initialPhase,
307
+ motionCooldownTimer: null,
308
+ lastArmedSource: null,
309
+ lastArmedRegions: void 0
310
+ };
311
+ this.cameras.set(deviceId, state);
312
+ if (registration.detectionMode === "on-motion") {
313
+ this.defaultRoundRobinKeys.push(deviceId);
314
+ }
315
+ if (initialPhase === "active") {
316
+ this.detectionStreamHandler?.(deviceId, "subscribe");
317
+ const cooldownMs = registration.motionCooldownMs ?? DEFAULT_MOTION_COOLDOWN_MS;
318
+ this.config.onPhaseChanged?.(deviceId, "active", {
319
+ source: "analyzer",
320
+ regions: void 0,
321
+ timestamp: Date.now(),
322
+ cooldownMs
323
+ });
324
+ }
325
+ }
326
+ unregisterCamera(deviceId) {
327
+ const state = this.cameras.get(deviceId);
328
+ if (!state) return;
329
+ if (state.motionCooldownTimer !== null) {
330
+ clearTimeout(state.motionCooldownTimer);
331
+ state.motionCooldownTimer = null;
332
+ }
333
+ if (state.phase === "active") {
334
+ this.detectionStreamHandler?.(deviceId, "unsubscribe");
335
+ }
336
+ state.motionQueue.clear();
337
+ state.detectionQueue.clear();
338
+ this.cameras.delete(deviceId);
339
+ const idx = this.defaultRoundRobinKeys.indexOf(deviceId);
340
+ if (idx !== -1) {
341
+ this.defaultRoundRobinKeys.splice(idx, 1);
342
+ if (this.defaultRoundRobinIndex >= this.defaultRoundRobinKeys.length) {
343
+ this.defaultRoundRobinIndex = 0;
344
+ }
345
+ }
346
+ }
347
+ enqueueMotionFrame(deviceId, frame) {
348
+ const state = this.cameras.get(deviceId);
349
+ if (!state) return;
350
+ state.motionQueue.enqueue(frame);
351
+ }
352
+ enqueueDetectionFrame(deviceId, frame) {
353
+ const state = this.cameras.get(deviceId);
354
+ if (!state) return;
355
+ if (state.phase !== "active") return;
356
+ frame._enqueuedAt = Date.now();
357
+ state.detectionQueue.enqueue(frame);
358
+ }
359
+ /**
360
+ * Report a motion event for a camera. Drives the unified phase
361
+ * machine for both motion sources (analyzer + onboard):
362
+ *
363
+ * - Every `detected: true` (any source) clears + rearms the
364
+ * cooldown timer and transitions watching → active. The same
365
+ * timer applies regardless of which source(s) are configured;
366
+ * concurrent sources just keep refreshing the same window.
367
+ * - `detected: false` is a no-op. Onboard sources never send an
368
+ * explicit clear, and the analyzer's "false" pulses would
369
+ * otherwise fight the cooldown when motion paused briefly
370
+ * during a scene. The timer is the single closure path.
371
+ * - Timer expiry transitions active → watching.
372
+ *
373
+ * Always-on cameras silently ignore reportMotion calls — they're
374
+ * already in `active` and have no cooldown.
375
+ *
376
+ * `source` and `regions` propagate into the phase-transition event
377
+ * so the wrapping addon can attach them to the cap-state slice +
378
+ * bus event.
379
+ */
380
+ reportMotion(deviceId, detected, source = "analyzer", regions = void 0) {
381
+ const state = this.cameras.get(deviceId);
382
+ if (!state) return;
383
+ if (state.registration.detectionMode !== "on-motion") return;
384
+ if (!detected) return;
385
+ state.lastArmedSource = source;
386
+ state.lastArmedRegions = regions;
387
+ const cooldownMs = state.registration.motionCooldownMs ?? DEFAULT_MOTION_COOLDOWN_MS;
388
+ if (state.motionCooldownTimer !== null) {
389
+ clearTimeout(state.motionCooldownTimer);
390
+ state.motionCooldownTimer = null;
391
+ }
392
+ if (state.phase === "watching") {
393
+ this.transitionToActive(deviceId, state, source, regions, cooldownMs);
394
+ }
395
+ state.motionCooldownTimer = setTimeout(() => {
396
+ state.motionCooldownTimer = null;
397
+ this.transitionToWatching(deviceId, state, cooldownMs);
398
+ }, cooldownMs);
399
+ }
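// (Editorial illustration, not part of the published file.) Example timeline
// for an "on-motion" camera using the default motionCooldownMs of 30000 ms:
//   t=0s   reportMotion(id, true)   watching -> active; cooldown timer armed for t=30s
//   t=12s  reportMotion(id, true)   stays active; timer re-armed, now expires at t=42s
//   t=20s  reportMotion(id, false)  no-op: the timer is the only closure path
//   t=42s  timer fires              active -> watching; detection queue cleared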
400
+ getPhase(deviceId) {
401
+ return this.cameras.get(deviceId)?.phase;
402
+ }
403
+ onResult(callback) {
404
+ this.resultCallbacks.push(callback);
405
+ }
406
+ start() {
407
+ if (this.intervalHandle !== null) return;
408
+ this.intervalHandle = setInterval(() => this.tick(), 10);
409
+ this.timingSampler.start();
410
+ }
411
+ stop() {
412
+ if (this.intervalHandle !== null) {
413
+ clearInterval(this.intervalHandle);
414
+ this.intervalHandle = null;
415
+ }
416
+ this.timingSampler.stop();
417
+ for (const state of this.cameras.values()) {
418
+ if (state.motionCooldownTimer !== null) {
419
+ clearTimeout(state.motionCooldownTimer);
420
+ state.motionCooldownTimer = null;
421
+ }
422
+ }
423
+ }
424
+ getMetrics() {
425
+ let totalQueueDepth = 0;
426
+ let totalInferenceTime = 0;
427
+ let totalInferenceCount = 0;
428
+ for (const state of this.cameras.values()) {
429
+ totalQueueDepth += state.motionQueue.size + state.detectionQueue.size;
430
+ for (const t of state.inferenceTimes) {
431
+ totalInferenceTime += t;
432
+ totalInferenceCount++;
433
+ }
434
+ }
435
+ return {
436
+ activeCameras: this.cameras.size,
437
+ throttledCameras: 0,
438
+ avgInferenceTimeMs: totalInferenceCount > 0 ? totalInferenceTime / totalInferenceCount : 0,
439
+ queueDepth: totalQueueDepth
440
+ };
441
+ }
442
+ getCameraMetrics(deviceId) {
443
+ const state = this.cameras.get(deviceId);
444
+ if (!state) return void 0;
445
+ const elapsedMs = Date.now() - state.startTime;
446
+ const elapsedSec = elapsedMs / 1e3;
447
+ const actualFps = elapsedSec > 0 ? state.processedCount / elapsedSec : 0;
448
+ const times = state.inferenceTimes;
449
+ const avgInference = times.length > 0 ? times.reduce((a, b) => a + b, 0) / times.length : 0;
450
+ return {
451
+ detectionMode: state.registration.detectionMode,
452
+ configuredFps: state.registration.fps,
453
+ actualFps,
454
+ queueDepth: state.motionQueue.size + state.detectionQueue.size,
455
+ avgInferenceTimeMs: avgInference,
456
+ droppedFrames: state.motionQueue.droppedFrames + state.detectionQueue.droppedFrames,
457
+ phase: state.phase
458
+ };
459
+ }
460
+ getAllCameraMetrics() {
461
+ const results = [];
462
+ for (const [deviceId] of this.cameras) {
463
+ const metrics = this.getCameraMetrics(deviceId);
464
+ if (metrics) {
465
+ results.push({ deviceId, ...metrics });
466
+ }
467
+ }
468
+ return results;
469
+ }
470
+ getAttachedCameras() {
471
+ return [...this.cameras.keys()];
472
+ }
473
+ transitionToActive(deviceId, state, source, regions, cooldownMs) {
474
+ state.phase = "active";
475
+ this.logger?.info("motion gate opened — phase=active", {
476
+ tags: { deviceId },
477
+ meta: { detectionMode: state.registration.detectionMode, source }
478
+ });
479
+ this.detectionStreamHandler?.(deviceId, "subscribe");
480
+ this.config.onPhaseChanged?.(deviceId, "active", {
481
+ source,
482
+ regions,
483
+ timestamp: Date.now(),
484
+ cooldownMs
485
+ });
486
+ }
487
+ transitionToWatching(deviceId, state, cooldownMs) {
488
+ state.phase = "watching";
489
+ state.detectionQueue.clear();
490
+ this.logger?.info("motion gate closed — phase=watching", {
491
+ tags: { deviceId },
492
+ meta: { lastSource: state.lastArmedSource }
493
+ });
494
+ this.detectionStreamHandler?.(deviceId, "unsubscribe");
495
+ const source = state.lastArmedSource ?? "analyzer";
496
+ this.config.onPhaseChanged?.(deviceId, "watching", {
497
+ source,
498
+ regions: void 0,
499
+ timestamp: Date.now(),
500
+ cooldownMs
501
+ });
502
+ state.lastArmedSource = null;
503
+ state.lastArmedRegions = void 0;
504
+ }
505
+ tick() {
506
+ this.drainMotionQueues();
507
+ if (this.semaphore.available <= 0) return;
508
+ const picked = this.pickNextDetectionFrame();
509
+ if (!picked) return;
510
+ const { deviceId, frame, state } = picked;
511
+ const frameInput = toFrameInput$1(frame);
512
+ void this.processWithSemaphore(deviceId, frame, frameInput, state, "detection");
513
+ }
514
+ drainMotionQueues() {
515
+ for (const [deviceId, state] of this.cameras) {
516
+ while (state.motionQueue.size > 0) {
517
+ const frame = state.motionQueue.dequeue();
518
+ if (frame) {
519
+ void this.config.analyzeMotion(deviceId, frame);
520
+ }
521
+ }
522
+ }
523
+ }
524
+ async processWithSemaphore(deviceId, frame, frameInput, state, streamType) {
525
+ const pickedAt = Date.now();
526
+ const captureTs = frame.timestamp;
527
+ const enqueuedAt = frame._enqueuedAt ?? captureTs;
528
+ const release = await this.semaphore.acquire();
529
+ const semAcquiredAt = Date.now();
530
+ try {
531
+ const result = await this.config.processFrame(deviceId, frameInput);
532
+ const inferDoneAt = Date.now();
533
+ const inferenceMs = inferDoneAt - semAcquiredAt;
534
+ state.inferenceTimes.push(inferenceMs);
535
+ if (state.inferenceTimes.length > 100) {
536
+ state.inferenceTimes.shift();
537
+ }
538
+ state.processedCount++;
539
+ if (result) {
540
+ await this.notifyCallbacks(deviceId, frame, result, streamType);
541
+ const emittedAt = Date.now();
542
+ this.timingSampler.addSample(deviceId, {
543
+ captureToEnqueue: enqueuedAt - captureTs,
544
+ queueWait: pickedAt - enqueuedAt,
545
+ semaphoreWait: semAcquiredAt - pickedAt,
546
+ inference: inferenceMs,
547
+ resultToEmit: emittedAt - inferDoneAt,
548
+ endToEnd: emittedAt - captureTs,
549
+ detections: result.detections?.length ?? 0
550
+ });
551
+ }
552
+ } finally {
553
+ release();
554
+ }
555
+ }
556
+ async notifyCallbacks(deviceId, frame, result, streamType) {
557
+ for (const callback of this.resultCallbacks) {
558
+ try {
559
+ await callback(deviceId, frame, result, streamType);
560
+ } catch {
561
+ }
562
+ }
563
+ }
564
+ pickNextDetectionFrame() {
565
+ for (const [deviceId, state] of this.cameras) {
566
+ if (state.registration.detectionMode === "always-on" && state.detectionQueue.size > 0) {
567
+ const frame = state.detectionQueue.dequeue();
568
+ return { deviceId, frame, state };
569
+ }
570
+ }
571
+ if (this.defaultRoundRobinKeys.length === 0) return null;
572
+ const startIndex = this.defaultRoundRobinIndex;
573
+ for (let i = 0; i < this.defaultRoundRobinKeys.length; i++) {
574
+ const idx = (startIndex + i) % this.defaultRoundRobinKeys.length;
575
+ const deviceId = this.defaultRoundRobinKeys[idx];
576
+ if (!deviceId) continue;
577
+ const state = this.cameras.get(deviceId);
578
+ if (!state) continue;
579
+ if (state.phase === "active" && state.detectionQueue.size > 0) {
580
+ this.defaultRoundRobinIndex = (idx + 1) % this.defaultRoundRobinKeys.length;
581
+ const frame = state.detectionQueue.dequeue();
582
+ if (!frame) continue;
583
+ return { deviceId, frame, state };
584
+ }
585
+ }
586
+ return null;
587
+ }
588
+ }
589
+ const BenchEngineChoiceSchema = zod.z.object({
590
+ runtime: zod.z.enum(["node", "python"]),
591
+ backend: zod.z.string(),
592
+ format: zod.z.enum(["onnx", "coreml", "openvino", "tflite", "pt"]),
593
+ device: zod.z.string().optional()
594
+ });
595
+ const BenchStepSchema = zod.z.lazy(() => zod.z.object({
596
+ addonId: zod.z.string(),
597
+ modelId: zod.z.string(),
598
+ enabled: zod.z.boolean(),
599
+ children: zod.z.array(BenchStepSchema).optional()
600
+ }));
601
+ const CacheBenchFrameInputSchema = zod.z.object({
602
+ imageBase64: zod.z.string(),
603
+ ttlSeconds: zod.z.number().int().positive().optional()
604
+ });
605
+ const CacheBenchFrameResultSchema = zod.z.object({
606
+ frameId: zod.z.string(),
607
+ width: zod.z.number(),
608
+ height: zod.z.number(),
609
+ expiresAt: zod.z.number()
610
+ });
611
+ const ReleaseBenchFrameInputSchema = zod.z.object({
612
+ frameId: zod.z.string()
613
+ });
614
+ const ReleaseBenchFrameResultSchema = zod.z.object({
615
+ released: zod.z.boolean()
616
+ });
617
+ const RunSyntheticBenchInputSchema = zod.z.object({
618
+ frameId: zod.z.string(),
619
+ steps: zod.z.array(BenchStepSchema).min(1),
620
+ parallel: zod.z.number().int().min(1).max(32),
621
+ iterations: zod.z.number().int().min(1).max(1e4),
622
+ warmup: zod.z.number().int().min(0).max(100).optional(),
623
+ sessionId: zod.z.string().optional(),
624
+ simulatePipeline: zod.z.boolean().optional(),
625
+ engine: BenchEngineChoiceSchema.optional()
626
+ });
627
+ const TimingSplitSchema = zod.z.object({
628
+ mean: zod.z.number(),
629
+ p50: zod.z.number(),
630
+ p95: zod.z.number(),
631
+ p99: zod.z.number()
632
+ });
633
+ const RunSyntheticBenchResultSchema = zod.z.object({
634
+ runs: zod.z.number(),
635
+ wallSec: zod.z.number(),
636
+ fps: zod.z.number(),
637
+ detectionsPerSec: zod.z.number(),
638
+ avgDetections: zod.z.number(),
639
+ callMs: TimingSplitSchema,
640
+ inferMs: zod.z.number(),
641
+ preprocessMs: zod.z.number(),
642
+ predictMs: zod.z.number(),
643
+ batchSizeMean: zod.z.number(),
644
+ batchSizeMax: zod.z.number(),
645
+ engine: zod.z.object({ runtime: zod.z.string(), backend: zod.z.string(), device: zod.z.string().optional() }).optional(),
646
+ tuning: zod.z.object({ batchMode: zod.z.string(), windowMs: zod.z.number(), maxBatchSize: zod.z.number(), concurrency: zod.z.number() }).optional(),
647
+ path: zod.z.string().optional()
648
+ });
649
+ const pipelineRunnerBenchActions = types.defineCustomActions({
650
+ cacheBenchFrame: types.customAction(
651
+ CacheBenchFrameInputSchema,
652
+ CacheBenchFrameResultSchema,
653
+ { kind: "mutation" }
654
+ ),
655
+ releaseBenchFrame: types.customAction(
656
+ ReleaseBenchFrameInputSchema,
657
+ ReleaseBenchFrameResultSchema,
658
+ { kind: "mutation" }
659
+ ),
660
+ runSyntheticBench: types.customAction(
661
+ RunSyntheticBenchInputSchema,
662
+ RunSyntheticBenchResultSchema,
663
+ { kind: "mutation" }
664
+ )
665
+ });
666
+ const DEFAULT_CONFIG = {
667
+ maxQueueDepth: 30,
668
+ // CoreML window accumulator coalesces concurrent calls into a single
669
+ // model.predict([list]) — the more in-flight, the larger the batch and
670
+ // the higher the per-frame throughput. With concurrency=2 the window
671
+ // never fills past batch=2, capping the pool at ~50 fps single-node.
672
+ // 16 matches the slider ceiling and lines up with bench numbers
673
+ // (parallel=16 hits batch=7-8/8, sustaining ~140 fps full path).
674
+ maxConcurrentInferences: 16,
675
+ targetLoadPercent: 80,
676
+ minThrottledFps: 1
677
+ };
678
+ function toFrameInput(frame) {
679
+ return {
680
+ data: frame.data,
681
+ width: frame.width,
682
+ height: frame.height,
683
+ format: frame.format,
684
+ timestamp: frame.timestamp
685
+ };
686
+ }
687
+ const STEP_LOG_INTERVAL_MS = 3e4;
688
+ const METRICS_SNAPSHOT_INTERVAL_MS = 1e3;
689
+ const METRICS_HEARTBEAT_MS = 3e4;
690
+ class PipelineRunnerAddon extends types.BaseAddon {
691
+ runner = null;
692
+ attached = /* @__PURE__ */ new Map();
693
+ nodeId = "unknown";
694
+ stepLogTimer = null;
695
+ metricsSnapshotTimer = null;
696
+ unsubMotionEvents = null;
697
+ /** Last analyzer-detected state per device — gates the
698
+ * `MotionOnMotionChanged` emit in `runMotionAnalysis` to transitions
699
+ * only (otherwise we'd emit on every analyzer frame). */
700
+ lastAnalyzerDetected = /* @__PURE__ */ new Map();
701
+ /**
702
+ * Last positive motion timestamp per device — preserved across the
703
+ * OFF transition so the motion runtime-state slice keeps a stable
704
+ * `lastDetectedAt` after the cooldown closes the phase. Cleared on
705
+ * detach.
706
+ */
707
+ lastMotionAt = /* @__PURE__ */ new Map();
708
+ /**
709
+ * Snapshot-equality cache for metrics-snapshot defer. The runner
710
+ * fires per-camera metrics every `METRICS_SNAPSHOT_INTERVAL_MS`;
711
+ * for an idle camera (no inference, queue empty, fps=0) every tick
712
+ * carries an identical payload. We skip the bus emit when the
713
+ * payload deep-equals the previous one so the events tab + remote
714
+ * subscribers stop seeing 60 metrics-snapshots/min/camera that
715
+ * convey nothing. A periodic heartbeat re-emits every
716
+ * METRICS_HEARTBEAT_MS so consumers know the runner is still
717
+ * alive.
718
+ */
719
+ lastEmittedCameraMetrics = /* @__PURE__ */ new Map();
720
+ lastEmittedRunnerLoad = null;
721
+ /**
722
+ * In-memory bench-frame cache (decoded JPEG bytes). Populated by the
723
+ * `cacheBenchFrame` custom action. Fed into the synthetic-bench loop
724
+ * via the `frame: FrameInput` shape that mirrors what stream-broker
725
+ * delivers to this very addon during real camera detection.
726
+ */
727
+ benchFrameCache = /* @__PURE__ */ new Map();
728
+ benchFrameSweeper = null;
729
+ constructor() {
730
+ super({ ...DEFAULT_CONFIG });
731
+ }
732
+ async onInitialize() {
733
+ const raw = this.ctx.kernel.localNodeId ?? this.ctx.id;
734
+ this.nodeId = raw.includes("/") ? raw.split("/")[0] : raw;
735
+ this.runner = new PipelineRunner({
736
+ maxQueueDepth: this.config.maxQueueDepth,
737
+ maxConcurrentInferences: this.config.maxConcurrentInferences,
738
+ targetLoadPercent: this.config.targetLoadPercent,
739
+ minThrottledFps: this.config.minThrottledFps,
740
+ processFrame: (deviceId, frame) => this.runInference(deviceId, frame),
741
+ analyzeMotion: (deviceId, frame) => this.runMotionAnalysis(deviceId, frame),
742
+ onPhaseChanged: (deviceId, phase, meta) => this.handlePhaseChanged(deviceId, phase, meta),
743
+ logger: this.ctx.logger
744
+ });
745
+ this.runner.timingSampler.setLogger(this.ctx.logger.child("timing"));
746
+ this.runner.onDetectionStreamChange((deviceId, action) => {
747
+ this.handleDetectionStreamChange(deviceId, action);
748
+ });
749
+ this.runner.onResult(async (deviceId, frame, result, _streamType) => {
750
+ this.emitInferenceResult(deviceId, frame, result);
751
+ });
752
+ this.runner.start();
753
+ this.ctx.logger.info(
754
+ "Pipeline runner started",
755
+ {
756
+ tags: { nodeId: this.nodeId },
757
+ meta: {
758
+ maxConcurrent: this.config.maxConcurrentInferences,
759
+ queueDepth: this.config.maxQueueDepth
760
+ }
761
+ }
762
+ );
763
+ if (this.ctx.eventBus) {
764
+ this.unsubMotionEvents = this.ctx.eventBus.subscribe(
765
+ { category: types.EventCategory.MotionOnMotionChanged },
766
+ (event) => {
767
+ const data = event.data;
768
+ const deviceId = data.deviceId;
769
+ const attachment = this.attached.get(deviceId);
770
+ if (!attachment) return;
771
+ const source = data.source;
772
+ if (!attachment.config.motionSources.includes(source)) return;
773
+ this.runner?.reportMotion(
774
+ deviceId,
775
+ data.detected,
776
+ source,
777
+ data.regions ? [...data.regions] : void 0
778
+ );
779
+ }
780
+ );
781
+ }
782
+ this.stepLogTimer = setInterval(() => this.logAttachedSteps(), STEP_LOG_INTERVAL_MS);
783
+ this.metricsSnapshotTimer = setInterval(
784
+ () => this.emitMetricsSnapshot(),
785
+ METRICS_SNAPSHOT_INTERVAL_MS
786
+ );
787
+ return {
788
+ providers: [{ capability: types.pipelineRunnerCapability, provider: this }],
789
+ customActions: pipelineRunnerBenchActions,
790
+ actionHandlers: {
791
+ cacheBenchFrame: async (input) => this.cacheBenchFrame(input),
792
+ releaseBenchFrame: async (input) => this.releaseBenchFrame(input),
793
+ runSyntheticBench: async (input) => this.runSyntheticBench(input)
794
+ }
795
+ };
796
+ }
797
+ async onShutdown() {
798
+ if (this.metricsSnapshotTimer) {
799
+ clearInterval(this.metricsSnapshotTimer);
800
+ this.metricsSnapshotTimer = null;
801
+ }
802
+ if (this.stepLogTimer) {
803
+ clearInterval(this.stepLogTimer);
804
+ this.stepLogTimer = null;
805
+ }
806
+ if (this.benchFrameSweeper) {
807
+ clearInterval(this.benchFrameSweeper);
808
+ this.benchFrameSweeper = null;
809
+ }
810
+ this.benchFrameCache.clear();
811
+ if (this.unsubMotionEvents) {
812
+ this.unsubMotionEvents();
813
+ this.unsubMotionEvents = null;
814
+ }
815
+ this.lastAnalyzerDetected.clear();
816
+ if (this.runner) {
817
+ this.runner.stop();
818
+ this.runner = null;
819
+ }
820
+ for (const attachment of this.attached.values()) {
821
+ attachment.motionUnsubscribe?.();
822
+ attachment.detectionUnsubscribe?.();
823
+ }
824
+ this.attached.clear();
825
+ }
826
+ // ── Synthetic bench (production-equivalent measurement) ───────────────
827
+ async cacheBenchFrame(input) {
828
+ const sharp = (await import("sharp")).default;
829
+ const jpeg = Buffer.from(input.imageBase64, "base64");
830
+ const { data, info } = await sharp(jpeg).raw().toBuffer({ resolveWithObject: true });
831
+ if (info.channels !== 3) {
832
+ throw new Error(`cacheBenchFrame: expected 3 channels (rgb), got ${info.channels}`);
833
+ }
834
+ const rgb = new Uint8Array(data);
835
+ const ttlMs = Math.max(6e4, (input.ttlSeconds ?? 600) * 1e3);
836
+ const frameId = `runner-bench-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`;
837
+ const expiresAt = Date.now() + ttlMs;
838
+ this.benchFrameCache.set(frameId, { data: rgb, width: info.width, height: info.height, format: "rgb", expiresAt });
839
+ if (!this.benchFrameSweeper) {
840
+ this.benchFrameSweeper = setInterval(() => this.sweepBenchFrameCache(), 6e4);
841
+ this.benchFrameSweeper.unref?.();
842
+ }
843
+ this.ctx.logger.info("cached bench frame", {
844
+ meta: { frameId, width: info.width, height: info.height, bytes: rgb.length, ttlMs }
845
+ });
846
+ return { frameId, width: info.width, height: info.height, expiresAt };
847
+ }
848
+ async releaseBenchFrame(input) {
849
+ return { released: this.benchFrameCache.delete(input.frameId) };
850
+ }
851
+ sweepBenchFrameCache() {
852
+ const now = Date.now();
853
+ for (const [id, entry] of this.benchFrameCache) {
854
+ if (entry.expiresAt < now) this.benchFrameCache.delete(id);
855
+ }
856
+ }
857
+ async runSyntheticBench(input) {
858
+ const ctx = this.ctx;
859
+ const api = ctx.api;
860
+ if (!api) throw new Error("runSyntheticBench: ctx.api unavailable");
861
+ ctx.logger.info("runSyntheticBench input", {
862
+ meta: { frameId: input.frameId, parallel: input.parallel, iterations: input.iterations }
863
+ });
864
+ const cached = this.benchFrameCache.get(input.frameId);
865
+ if (!cached) {
866
+ throw new Error(`runSyntheticBench: frameId ${input.frameId} not cached (call cacheBenchFrame first)`);
867
+ }
868
+ const stepsToRun = input.steps.map((s) => ({
869
+ addonId: s.addonId,
870
+ modelId: s.modelId,
871
+ enabled: s.enabled,
872
+ children: s.children ?? []
873
+ }));
874
+ const enabledSteps = stepsToRun.filter((s) => s.enabled);
875
+ const isSingleStep = enabledSteps.length === 1 && (!enabledSteps[0].children || enabledSteps[0].children.filter((c) => c.enabled).length === 0);
876
+ const useFastPath = isSingleStep && !input.simulatePipeline;
877
+ const rootStep = enabledSteps[0];
878
+ const sharedFrame = {
879
+ data: cached.data,
880
+ format: cached.format,
881
+ width: cached.width,
882
+ height: cached.height,
883
+ timestamp: Date.now()
884
+ };
885
+ let poolFrameId = null;
886
+ if (useFastPath && rootStep) {
887
+ ctx.logger.info("synthetic bench: using Python cache path", {
888
+ meta: { step: rootStep.addonId, model: rootStep.modelId }
889
+ });
890
+ const cacheResult = await api.pipelineExecutor.cacheFrameInPool.mutate({
891
+ data: new Uint8Array(cached.data.slice().buffer),
892
+ width: cached.width,
893
+ height: cached.height,
894
+ format: cached.format
895
+ });
896
+ poolFrameId = cacheResult.frameId;
897
+ await api.pipelineExecutor.runPipeline.mutate({
898
+ steps: stepsToRun,
899
+ frame: sharedFrame,
900
+ ...input.engine ? { engine: input.engine } : {}
901
+ });
902
+ const warmupCount2 = input.warmup ?? 1;
903
+ for (let w = 0; w < warmupCount2; w++) {
904
+ await api.pipelineExecutor.inferCached.mutate({
905
+ stepId: rootStep.addonId,
906
+ frameId: poolFrameId
907
+ });
908
+ }
909
+ const wallTimings2 = [];
910
+ const inferTimings2 = [];
911
+ const preprocessTimings2 = [];
912
+ const predictTimings2 = [];
913
+ const batchSizes2 = [];
914
+ const detCounts2 = [];
915
+ let _n = 0;
916
+ const sessionId2 = input.sessionId ?? `synth-${Date.now().toString(36)}`;
917
+ const totalRuns2 = input.parallel * input.iterations;
918
+ const wallStart2 = performance.now();
919
+ const worker2 = async () => {
920
+ for (let i = 0; i < input.iterations; i++) {
921
+ const t0 = performance.now();
922
+ const result = await api.pipelineExecutor.inferCached.mutate({
923
+ stepId: rootStep.addonId,
924
+ frameId: poolFrameId
925
+ });
926
+ const wallMs = performance.now() - t0;
927
+ const r = result;
928
+ const inferMs = typeof r["inferenceMs"] === "number" ? r["inferenceMs"] : wallMs;
929
+ const preMs = typeof r["preprocessMs"] === "number" ? r["preprocessMs"] : 0;
930
+ const predMs = typeof r["predictMs"] === "number" ? r["predictMs"] : 0;
931
+ const bs = typeof r["batchSize"] === "number" ? r["batchSize"] : 1;
932
+ const dets = Array.isArray(r["detections"]) ? r["detections"].length : 0;
933
+ wallTimings2.push(wallMs);
934
+ inferTimings2.push(inferMs);
935
+ preprocessTimings2.push(preMs);
936
+ predictTimings2.push(predMs);
937
+ batchSizes2.push(bs);
938
+ detCounts2.push(dets);
939
+ const n = ++_n;
940
+ if (n <= 20) {
941
+ ctx.logger.info("bench call trace (cached)", {
942
+ meta: { n, wallMs: Math.round(wallMs), inferMs: Math.round(inferMs), preMs: Math.round(preMs * 10) / 10, predMs: Math.round(predMs * 10) / 10, bs }
943
+ });
944
+ }
945
+ if (n % Math.max(1, input.parallel) === 0) {
946
+ const elapsed = (performance.now() - wallStart2) / 1e3;
947
+ const fps = elapsed > 0 ? n / elapsed : 0;
948
+ const meanCallMs = wallTimings2.reduce((s, v) => s + v, 0) / wallTimings2.length;
949
+ const sorted = [...wallTimings2].sort((a, b) => a - b);
950
+ const p95 = sorted[Math.min(sorted.length - 1, Math.floor(0.95 * sorted.length))] ?? 0;
951
+ const totalDet = detCounts2.reduce((s, v) => s + v, 0);
952
+ const avgDet = detCounts2.length > 0 ? totalDet / detCounts2.length : 0;
953
+ const bsMean = batchSizes2.reduce((s, v) => s + v, 0) / batchSizes2.length;
954
+ const msg = `runs ${n}/${totalRuns2} · ${fps.toFixed(1)} fps · call ${meanCallMs.toFixed(1)}ms · batch ${bsMean.toFixed(1)}`;
955
+ if (ctx.eventBus) {
956
+ ctx.eventBus.emit({
957
+ id: `bench-${n}`,
958
+ timestamp: /* @__PURE__ */ new Date(),
959
+ source: { type: "pipeline", id: "synthetic-bench" },
960
+ category: types.EventCategory.PipelineProgress,
961
+ data: {
962
+ nodeId: "hub",
963
+ sessionId: sessionId2,
964
+ step: "synthetic-bench",
965
+ message: msg,
966
+ benchProgress: true,
967
+ runs: n,
968
+ totalRuns: totalRuns2,
969
+ fps: Math.round(fps * 100) / 100,
970
+ meanMs: Math.round(meanCallMs * 100) / 100,
971
+ p95Ms: Math.round(p95 * 100) / 100,
972
+ inferMeanMs: Math.round(inferTimings2.reduce((s, v) => s + v, 0) / inferTimings2.length * 100) / 100,
973
+ preprocessMeanMs: Math.round(preprocessTimings2.reduce((s, v) => s + v, 0) / preprocessTimings2.length * 100) / 100,
974
+ predictMeanMs: Math.round(predictTimings2.reduce((s, v) => s + v, 0) / predictTimings2.length * 100) / 100,
975
+ batchSizeMean: Math.round(bsMean * 100) / 100,
976
+ detPerSec: elapsed > 0 ? Math.round(totalDet / elapsed * 100) / 100 : 0,
977
+ avgDetections: Math.round(avgDet * 100) / 100
978
+ }
979
+ });
980
+ } else {
981
+ ctx.logger.warn("emitProgress: NO eventBus");
982
+ }
983
+ }
984
+ }
985
+ };
986
+ await Promise.all(Array.from({ length: input.parallel }, () => worker2()));
987
+ const wallSec2 = (performance.now() - wallStart2) / 1e3;
988
+ await api.pipelineExecutor.uncacheFrame.mutate({ frameId: poolFrameId }).catch(() => {
989
+ });
990
+ return this.buildBenchResult(wallTimings2, inferTimings2, preprocessTimings2, predictTimings2, batchSizes2, detCounts2, wallSec2, "cached");
991
+ }
992
+ ctx.logger.info("synthetic bench: using full runPipeline path", {
993
+ meta: { steps: enabledSteps.length, simulatePipeline: !!input.simulatePipeline }
994
+ });
995
+ let _callCount = 0;
996
+ const callOnce = async () => {
997
+ const t0 = performance.now();
998
+ const result = await api.pipelineExecutor.runPipeline.mutate({
999
+ steps: stepsToRun,
1000
+ frame: sharedFrame,
1001
+ ...input.engine ? { engine: input.engine } : {}
1002
+ });
1003
+ const wallMs = performance.now() - t0;
1004
+ const n = ++_callCount;
1005
+ if (n <= 20) {
1006
+ ctx.logger.info("bench call trace", {
1007
+ meta: {
1008
+ n,
1009
+ wallMs: Math.round(wallMs),
1010
+ totalInferenceMs: Math.round(result.debug?.totalInferenceMs ?? 0),
1011
+ predictMs: Math.round((result.debug?.predictMs ?? 0) * 10) / 10,
1012
+ preprocessMs: Math.round((result.debug?.preprocessMs ?? 0) * 10) / 10,
1013
+ batchSize: result.debug?.batchSize ?? 1
1014
+ }
1015
+ });
1016
+ }
1017
+ return { wallMs, result };
1018
+ };
1019
+ const warmupCount = input.warmup ?? 1;
1020
+ for (let i = 0; i < warmupCount; i++) {
1021
+ await callOnce();
1022
+ }
1023
+ const wallTimings = [];
1024
+ const serverWallTimings = [];
1025
+ const inferTimings = [];
1026
+ const preprocessTimings = [];
1027
+ const predictTimings = [];
1028
+ const batchSizes = [];
1029
+ const detCounts = [];
1030
+ const sessionId = input.sessionId ?? `synth-${Date.now().toString(36)}`;
1031
+ const totalRuns = input.parallel * input.iterations;
1032
+ const wallStart = performance.now();
1033
+ const worker = async () => {
1034
+ for (let i = 0; i < input.iterations; i++) {
1035
+ const { wallMs, result } = await callOnce();
1036
+ wallTimings.push(wallMs);
1037
+ serverWallTimings.push(result.debug?.wallMs ?? 0);
1038
+ inferTimings.push(result.debug?.totalInferenceMs ?? 0);
1039
+ preprocessTimings.push(result.debug?.preprocessMs ?? 0);
1040
+ predictTimings.push(result.debug?.predictMs ?? 0);
1041
+ batchSizes.push(result.debug?.batchSize ?? 1);
1042
+ detCounts.push(result.detections?.length ?? 0);
1043
+ const n = wallTimings.length;
1044
+ if (n % Math.max(1, input.parallel) === 0 && ctx.eventBus) {
1045
+ const elapsed = (performance.now() - wallStart) / 1e3;
1046
+ const fps = elapsed > 0 ? n / elapsed : 0;
1047
+ const meanMs = wallTimings.reduce((s, v) => s + v, 0) / n;
1048
+ const sorted = [...wallTimings].sort((a, b) => a - b);
1049
+ const p95 = sorted[Math.min(sorted.length - 1, Math.floor(0.95 * sorted.length))] ?? 0;
1050
+ const totalDet = detCounts.reduce((s, v) => s + v, 0);
1051
+ const bsMean = batchSizes.reduce((s, v) => s + v, 0) / n;
1052
+ ctx.eventBus.emit({
1053
+ id: `bench-${n}`,
1054
+ timestamp: /* @__PURE__ */ new Date(),
1055
+ source: { type: "pipeline", id: "synthetic-bench" },
1056
+ category: types.EventCategory.PipelineProgress,
1057
+ data: {
1058
+ nodeId: "hub",
1059
+ sessionId,
1060
+ step: "synthetic-bench",
1061
+ message: `runs ${n}/${totalRuns} · ${fps.toFixed(1)} fps · call ${meanMs.toFixed(1)}ms · batch ${bsMean.toFixed(1)}`,
1062
+ benchProgress: true,
1063
+ runs: n,
1064
+ totalRuns,
1065
+ fps: Math.round(fps * 100) / 100,
1066
+ meanMs: Math.round(meanMs * 100) / 100,
1067
+ p95Ms: Math.round(p95 * 100) / 100,
1068
+ inferMeanMs: Math.round(inferTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
1069
+ preprocessMeanMs: Math.round(preprocessTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
1070
+ predictMeanMs: Math.round(predictTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
1071
+ batchSizeMean: Math.round(bsMean * 100) / 100,
1072
+ detPerSec: elapsed > 0 ? Math.round(totalDet / elapsed * 100) / 100 : 0,
1073
+ avgDetections: n > 0 ? Math.round(totalDet / n * 100) / 100 : 0
1074
+ }
1075
+ });
1076
+ }
1077
+ }
1078
+ };
1079
+ await Promise.all(Array.from({ length: input.parallel }, () => worker()));
1080
+ const wallSec = (performance.now() - wallStart) / 1e3;
1081
+ return this.buildBenchResult(wallTimings, inferTimings, preprocessTimings, predictTimings, batchSizes, detCounts, wallSec, "pipeline");
1082
+ }
1083
+ async buildBenchResult(wallTimings, inferTimings, preprocessTimings, predictTimings, batchSizes, detCounts, wallSec, path) {
1084
+ const meanOfArr = (xs) => xs.length > 0 ? xs.reduce((s, v) => s + v, 0) / xs.length : 0;
1085
+ this.ctx.logger.info("synthetic bench summary", {
1086
+ meta: {
1087
+ runs: wallTimings.length,
1088
+ wallSec: Math.round(wallSec * 100) / 100,
1089
+ fps: Math.round(wallTimings.length / wallSec * 100) / 100,
1090
+ callMeanMs: Math.round(meanOfArr(wallTimings)),
1091
+ inferMeanMs: Math.round(meanOfArr(inferTimings)),
1092
+ preprocessMeanMs: Math.round(meanOfArr(preprocessTimings)),
1093
+ predictMeanMs: Math.round(meanOfArr(predictTimings)),
1094
+ batchSizeMean: Math.round(meanOfArr(batchSizes) * 100) / 100,
1095
+ batchSizeMax: batchSizes.length > 0 ? Math.max(...batchSizes) : 0
1096
+ }
1097
+ });
1098
+ const sorted = [...wallTimings].sort((a, b) => a - b);
1099
+ const pick = (q) => sorted.length > 0 ? sorted[Math.min(sorted.length - 1, Math.floor(q * sorted.length))] : 0;
1100
+ const meanOf = (xs) => xs.length > 0 ? xs.reduce((s, v) => s + v, 0) / xs.length : 0;
1101
+ const totalRuns = wallTimings.length;
1102
+ const totalDet = detCounts.reduce((s, v) => s + v, 0);
1103
+ return {
1104
+ runs: totalRuns,
1105
+ wallSec: Math.round(wallSec * 1e3) / 1e3,
1106
+ fps: wallSec > 0 ? Math.round(totalRuns / wallSec * 100) / 100 : 0,
1107
+ detectionsPerSec: wallSec > 0 ? Math.round(totalDet / wallSec * 100) / 100 : 0,
1108
+ avgDetections: totalRuns > 0 ? Math.round(totalDet / totalRuns * 100) / 100 : 0,
1109
+ callMs: {
1110
+ mean: Math.round(meanOf(wallTimings) * 100) / 100,
1111
+ p50: Math.round(pick(0.5) * 100) / 100,
1112
+ p95: Math.round(pick(0.95) * 100) / 100,
1113
+ p99: Math.round(pick(0.99) * 100) / 100
1114
+ },
1115
+ inferMs: Math.round(meanOf(inferTimings) * 100) / 100,
1116
+ preprocessMs: Math.round(meanOf(preprocessTimings) * 100) / 100,
1117
+ predictMs: Math.round(meanOf(predictTimings) * 100) / 100,
1118
+ batchSizeMean: Math.round(meanOf(batchSizes) * 100) / 100,
1119
+ batchSizeMax: batchSizes.length > 0 ? Math.max(...batchSizes) : 0,
1120
+ path,
1121
+ ...await this.getEngineAndTuning()
1122
+ };
1123
+ }
1124
+ async getEngineAndTuning() {
1125
+ try {
1126
+ const api = this.ctx.api;
1127
+ if (!api) return {};
1128
+ const [eng, tuning] = await Promise.all([
1129
+ api.pipelineExecutor.getSelectedEngine.query(),
1130
+ api.pipelineExecutor.getEffectiveTuning.query()
1131
+ ]);
1132
+ return {
1133
+ engine: eng ? { runtime: eng.runtime, backend: eng.backend, device: eng.device } : void 0,
1134
+ tuning: tuning ?? void 0
1135
+ };
1136
+ } catch {
1137
+ return {};
1138
+ }
1139
+ }
1140
+ // ── IPipelineRunnerProvider implementation ────────────────────────────
1141
+ async attachCamera(config) {
1142
+ const runner = this.runner;
+ const ctx = this.ctx;
+ if (!runner || !ctx) {
+ throw new Error("PipelineRunnerAddon: attachCamera called before initialize completed");
+ }
+ this.ctx.logger.info("attachCamera received config", {
+ tags: { deviceId: config.deviceId },
+ meta: {
+ motionSources: config.motionSources,
+ motionSourcesType: Array.isArray(config.motionSources) ? `array(${config.motionSources.length})` : typeof config.motionSources,
+ motionStreamId: config.motionStreamId,
+ detectionStreamId: config.detectionStreamId,
+ keys: Object.keys(config)
+ }
+ });
+ if (this.attached.has(config.deviceId)) {
+ this.detachInternal(config.deviceId);
+ }
+ runner.registerCamera(config.deviceId, {
+ detectionMode: config.detectionMode,
+ fps: config.detectionFps,
+ motionCooldownMs: config.motionCooldownMs
+ });
+ const attachment = {
+ config,
+ motionUnsubscribe: null,
+ detectionUnsubscribe: null
+ };
+ this.attached.set(config.deviceId, attachment);
+ if (config.motionSources.includes("analyzer")) {
+ attachment.motionUnsubscribe = await this.subscribeMotionFrames(config);
+ }
+ const stepsCount = config.steps?.length ?? 0;
+ const dispatch = stepsCount > 0 ? `runPipeline(${stepsCount}step${stepsCount === 1 ? "" : "s"})` : config.steps !== void 0 ? "skip(0steps)" : "runFrame(legacy)";
+ const engineLabel = config.engine ? `${config.engine.runtime}+${config.engine.backend}/${config.engine.format}` : "default";
+ this.ctx.logger.info(
+ "attachCamera",
+ {
+ tags: { deviceId: config.deviceId },
+ meta: {
+ detectionMode: config.detectionMode,
+ audioMode: config.audioMode,
+ motionFps: config.motionFps,
+ detectionFps: config.detectionFps,
+ motionSources: config.motionSources,
+ dispatch,
+ engine: engineLabel
+ }
+ }
+ );
+ return { success: true };
+ }
+ async detachCamera(input) {
+ this.detachInternal(input.deviceId);
+ return { success: true };
+ }
+ async reportMotion(input) {
+ this.runner?.reportMotion(input.deviceId, input.detected, input.source, input.regions);
+ return { success: true };
+ }
+ /**
+ * Periodic per-camera step roster dump. Once every
+ * STEP_LOG_INTERVAL_MS (30s) emits one log line per attached camera
+ * with the configured detection step tree + audio classifier branch
+ * so an operator looking at the agent log can quickly see what each
+ * camera is currently running without crossing tRPC. Skips when no
+ * cameras are attached so quiet dev runs stay silent.
+ */
+ logAttachedSteps() {
+ if (this.attached.size === 0) return;
+ for (const [deviceId, attachment] of this.attached) {
+ const cfg = attachment.config;
+ const detectionSteps = cfg.steps && cfg.steps.length > 0 ? this.flattenSteps(cfg.steps).filter((s) => s.enabled) : [];
+ const detectionLabel = detectionSteps.length > 0 ? detectionSteps.map((s) => `${s.addonId}/${s.modelId}`).join(" → ") : "<none>";
+ const audioLabel = cfg.audio && cfg.audio.enabled ? `${cfg.audio.engine.runtime}/${cfg.audio.engine.backend}/${cfg.audio.modelId}` : "<off>";
+ const engineLabel = cfg.engine ? `${cfg.engine.runtime}/${cfg.engine.backend}${cfg.engine.device ? `/${cfg.engine.device}` : ""}` : "<unset>";
+ this.ctx.logger.info("Camera pipeline roster", {
+ tags: { deviceId },
+ meta: {
+ phase: "roster",
+ intervalSec: STEP_LOG_INTERVAL_MS / 1e3,
+ pipelineEnabled: cfg.pipelineEnabled,
+ motionSources: cfg.motionSources,
+ motionFps: cfg.motionFps,
+ detectionFps: cfg.detectionFps,
+ engine: engineLabel,
+ videoSteps: detectionLabel,
+ videoStepCount: detectionSteps.length,
+ audio: audioLabel
+ }
+ });
+ }
+ }
+ /** Recursively flatten the step tree → ordered list of every node. */
+ flattenSteps(steps) {
+ const out = [];
+ const walk = (s) => {
+ out.push(s);
+ if (s.children) {
+ for (const c of s.children) walk(c);
+ }
+ };
+ for (const s of steps) walk(s);
+ return out;
+ }
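For reference, a minimal sketch of how the depth-first flatten above turns a nested step tree into the roster label logged by logAttachedSteps. The step shape (addonId, modelId, enabled, children) mirrors the fields read above; the sample data and function names are illustrative only.

// Illustrative only — mirrors flattenSteps()/logAttachedSteps() above.
interface StepNode {
  addonId: string;
  modelId: string;
  enabled: boolean;
  children?: StepNode[];
}

function flatten(steps: StepNode[]): StepNode[] {
  const out: StepNode[] = [];
  const walk = (s: StepNode): void => {
    out.push(s);
    for (const c of s.children ?? []) walk(c);
  };
  for (const s of steps) walk(s);
  return out;
}

const steps: StepNode[] = [
  {
    addonId: "detector",
    modelId: "yolo",
    enabled: true,
    children: [{ addonId: "classifier", modelId: "resnet", enabled: true }]
  },
  { addonId: "tracker", modelId: "sort", enabled: false }
];

// Depth-first order, disabled nodes dropped: "detector/yolo → classifier/resnet"
const label = flatten(steps)
  .filter((s) => s.enabled)
  .map((s) => `${s.addonId}/${s.modelId}`)
  .join(" → ");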
+ detachInternal(deviceId) {
+ const attachment = this.attached.get(deviceId);
+ if (!attachment) return;
+ attachment.motionUnsubscribe?.();
+ attachment.detectionUnsubscribe?.();
+ this.attached.delete(deviceId);
+ this.lastMotionAt.delete(deviceId);
+ this.lastEmittedCameraMetrics.delete(deviceId);
+ this.runner?.unregisterCamera(deviceId);
+ this.ctx?.logger.info("detachCamera", { tags: { deviceId } });
+ }
+ async getLocalLoad() {
+ const metrics = this.runner?.getMetrics() ?? { avgInferenceTimeMs: 0, queueDepth: 0 };
+ const allCameraMetrics = this.runner?.getAllCameraMetrics() ?? [];
+ let activeCameras = 0;
+ let totalActualFps = 0;
+ for (const cm of allCameraMetrics) {
+ if (cm.phase === "active") activeCameras++;
+ totalActualFps += cm.actualFps;
+ }
+ return {
+ nodeId: this.nodeId,
+ attachedCameras: this.attached.size,
+ activeCameras,
+ avgInferenceFps: totalActualFps,
+ avgInferenceTimeMs: metrics.avgInferenceTimeMs,
+ queueDepthTotal: metrics.queueDepth,
+ hardware: {
+ hasGpu: false,
+ inferenceBackend: void 0
+ }
+ };
+ }
+ async getLocalMetrics() {
+ const m = this.runner?.getMetrics() ?? { activeCameras: 0, throttledCameras: 0, avgInferenceTimeMs: 0, queueDepth: 0 };
+ return { nodeId: this.nodeId, ...m };
+ }
+ async getCameraMetrics(input) {
+ return this.runner?.getCameraMetrics(input.deviceId) ?? null;
+ }
+ getAllCameraMetrics() {
+ return this.runner?.getAllCameraMetrics() ?? [];
+ }
+ getLocalCameras() {
+ return [...this.attached.keys()];
+ }
+ // ── Internal: broker subscription wiring ─────────────────────────────
+ async subscribeMotionFrames(config) {
+ const ctx = this.ctx;
+ const runner = this.runner;
+ if (!ctx || !runner) return null;
+ const log = this.ctx.logger.withTags({ deviceId: config.deviceId });
+ const api = this.ctx.api;
+ if (!api) {
+ log.warn("subscribeMotionFrames: this.ctx.api not available");
+ return null;
+ }
+ const motionBrokerId = `${config.deviceId}/${config.motionStreamId}`;
+ const motionBroker = await api.streamBroker.getBroker.query({ brokerId: motionBrokerId });
+ if (!motionBroker) {
+ log.warn("subscribeMotionFrames: no broker found", { meta: { brokerId: motionBrokerId } });
+ return null;
+ }
+ return motionBroker.onDecodedFrame(
+ (frame) => {
+ runner.enqueueMotionFrame(config.deviceId, frame);
+ },
+ { maxFps: config.motionFps, format: "gray", tag: "motion" }
+ );
+ }
+ handleDetectionStreamChange(deviceId, action) {
+ const attachment = this.attached.get(deviceId);
+ if (!attachment) return;
+ if (action === "subscribe") {
+ void this.subscribeDetectionFrames(attachment.config).then((unsub) => {
+ attachment.detectionUnsubscribe = unsub;
+ });
+ } else {
+ attachment.detectionUnsubscribe?.();
+ attachment.detectionUnsubscribe = null;
+ }
+ }
+ /**
+ * Bridge runner phase transitions to the device's `motion` runtime
+ * state + the bus. Single ownership point — every motion source
+ * (analyzer, onboard, future variants) funnels through the runner's
+ * phase machine and lands here.
+ *
+ * - Cap-state via the unified `device-state.setCapSlice` API.
+ * `autoClearAfterMs = cooldownMs` on ON, `null` on OFF.
+ * `lastDetectedAt` is preserved across OFF using `lastMotionAt`.
+ * - Bus event `MotionOnMotionChanged` fires alongside for consumers
+ * that prefer event-driven over runtime-state polling.
+ */
+ handlePhaseChanged(deviceId, phase, meta) {
+ const detected = phase === "active";
+ if (detected) this.lastMotionAt.set(deviceId, meta.timestamp);
+ const lastDetectedAt = this.lastMotionAt.get(deviceId) ?? null;
+ const slice = {
+ detected,
+ lastDetectedAt,
+ autoClearAfterMs: detected ? meta.cooldownMs : null
+ };
+ void (async () => {
+ const dev = await this.ctx.fetchDevice(deviceId);
+ await dev.deviceState.setCapSlice({ capName: "motion", slice });
+ })().catch((err) => {
+ this.ctx.logger.debug("motion cap-state write failed", {
+ tags: { deviceId },
+ meta: { error: types.errMsg(err) }
+ });
+ });
+ if (this.ctx.eventBus) {
+ const from = detected ? "watching" : "active";
+ const to = detected ? "active" : "watching";
+ const reason = detected ? "motion_detected" : "cooldown_expired";
+ const payload = {
+ deviceId,
+ from,
+ to,
+ reason,
+ source: meta.source,
+ cooldownMs: meta.cooldownMs,
+ timestamp: meta.timestamp
+ };
+ this.ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.DetectionPhaseTransition,
+ { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId },
+ payload
+ ));
+ }
+ }
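The phase-to-cap-slice mapping described in the comment above (autoClearAfterMs = cooldownMs on ON, null on OFF, lastDetectedAt preserved across OFF) can be read as a small pure function. A sketch under assumed types, not the package's API:

// Illustrative mapping of a runner phase change to the `motion` cap slice.
// Type names are assumptions; only the field semantics come from the code above.
type Phase = "active" | "watching";

interface MotionSlice {
  detected: boolean;
  lastDetectedAt: number | null;
  autoClearAfterMs: number | null;
}

function toMotionSlice(
  phase: Phase,
  cooldownMs: number,
  timestamp: number,
  lastMotionAt: number | null // last known detection time for this device
): MotionSlice {
  const detected = phase === "active";
  // ON refreshes lastDetectedAt; OFF keeps the previous value so consumers
  // can still show "last motion N minutes ago" after the cooldown clears.
  const lastDetectedAt = detected ? timestamp : lastMotionAt;
  return {
    detected,
    lastDetectedAt,
    autoClearAfterMs: detected ? cooldownMs : null
  };
}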
+ async subscribeDetectionFrames(config) {
+ const ctx = this.ctx;
+ const runner = this.runner;
+ if (!ctx || !runner) return null;
+ const log = this.ctx.logger.withTags({ deviceId: config.deviceId });
+ const api = this.ctx.api;
+ if (!api) {
+ log.warn("subscribeDetectionFrames: this.ctx.api not available");
+ return null;
+ }
+ const detectionBrokerId = `${config.deviceId}/${config.detectionStreamId}`;
+ const detectionBroker = await api.streamBroker.getBroker.query({ brokerId: detectionBrokerId });
+ if (!detectionBroker) {
+ log.warn("subscribeDetectionFrames: no broker found", { meta: { brokerId: detectionBrokerId } });
+ return null;
+ }
+ return detectionBroker.onDecodedFrame(
+ (frame) => {
+ runner.enqueueDetectionFrame(config.deviceId, frame);
+ },
+ // `format: 'rgb'` is the Phase 4 hot-path switch: detection now
+ // requests raw RGB24 from the broker so the decoder skips the
+ // sharp JPEG encode and the Python pool skips PIL JPEG decode.
+ // When WebRTC also subscribes (jpeg), the broker derives JPEG
+ // once per frame via its conversion cache — no double work.
+ { maxFps: config.detectionFps, format: "rgb", tag: "detection" }
+ );
+ }
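A rough sketch of the "no double work" point in the comment above: two consumers subscribe to the same broker with different formats, and the broker only derives JPEG for the subscriber that asks for it. The getBroker/onDecodedFrame shapes follow the calls above; the WebRTC subscriber, its handler, and the FPS values are hypothetical.

// Illustrative only — same broker, two formats (rgb for detection, jpeg for
// a hypothetical WebRTC sender). Option shapes mirror the calls above.
async function wireSubscribers(api: any, deviceId: string, streamId: string) {
  const broker = await api.streamBroker.getBroker.query({
    brokerId: `${deviceId}/${streamId}`
  });
  if (!broker) return;

  // Detection path: raw RGB24, no JPEG encode/decode on the hot path.
  const unsubDetection = broker.onDecodedFrame(
    (frame: unknown) => { /* enqueue for inference */ },
    { maxFps: 5, format: "rgb", tag: "detection" }
  );

  // Hypothetical WebRTC path: JPEG derived once per frame by the broker's
  // conversion cache, shared with any other jpeg subscribers.
  const unsubWebrtc = broker.onDecodedFrame(
    (jpegFrame: unknown) => { /* hand to the video sender */ },
    { maxFps: 15, format: "jpeg", tag: "webrtc" }
  );

  return () => { unsubDetection?.(); unsubWebrtc?.(); };
}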
+ // ── Internal: inference + motion callbacks ───────────────────────────
+ async runInference(deviceId, frame) {
+ const ctx = this.ctx;
+ if (!ctx) return null;
+ const log = this.ctx.logger.withTags({ deviceId });
+ const api = this.ctx.api;
+ if (!api) {
+ log.error("runInference: this.ctx.api not available");
+ return null;
+ }
+ const attachment = this.attached.get(deviceId);
+ const camConfig = attachment?.config;
+ const steps = camConfig?.steps;
+ const engine = camConfig?.engine;
+ if (!steps) {
+ log.warn("runInference: no steps in attach config — skipping frame (legacy attach?)");
+ return null;
+ }
+ if (steps.length === 0) {
+ return null;
+ }
+ try {
+ return await api.pipelineExecutor.runPipeline.mutate({
+ // tRPC input is a mutable array; the attach payload holds it
+ // as readonly. One spread copy at the cap boundary is cheap
+ // (pipeline step trees are tiny) and keeps the type surface
+ // clean without casting.
+ steps: [...steps],
+ frame,
+ deviceId,
+ ...engine ? { engine } : {}
+ });
+ } catch (err) {
+ const msg = types.errMsg(err);
+ log.error("runInference failed", { meta: { error: msg } });
+ return null;
+ }
+ }
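The readonly-to-mutable note in the comment above is a plain TypeScript variance issue; a minimal sketch (type and function names are assumptions) of why the single spread copy is enough:

// Illustrative: the attach payload exposes the step tree as readonly,
// while the mutation input wants a plain mutable array.
interface PipelineStep { addonId: string; modelId: string; }

interface AttachConfig {
  readonly steps: readonly PipelineStep[];
}

function buildMutationInput(config: AttachConfig): { steps: PipelineStep[] } {
  // `[...config.steps]` produces a fresh mutable PipelineStep[] without a
  // cast; the copy is shallow and step trees are small, so the cost is
  // negligible next to the per-frame inference call it accompanies.
  return { steps: [...config.steps] };
}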
+ async runMotionAnalysis(deviceId, frame) {
+ const ctx = this.ctx;
+ const runner = this.runner;
+ if (!ctx || !runner) return;
+ const log = this.ctx.logger.withTags({ deviceId });
+ const motionStart = Date.now();
+ try {
+ const api = this.ctx.api;
+ if (!api) {
+ log.warn("runMotionAnalysis: this.ctx.api not available");
+ return;
+ }
+ const result = await api.motionDetection.analyze.mutate({ deviceId, frame: toFrameInput(frame) });
+ if (!result) return;
+ const detected = result.regions.length > 0;
+ const prevDetected = this.lastAnalyzerDetected.get(deviceId) ?? false;
+ if (detected !== prevDetected) {
+ this.lastAnalyzerDetected.set(deviceId, detected);
+ if (this.ctx.eventBus) {
+ this.ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.MotionOnMotionChanged,
+ // EventSource wrapper kept symmetric with the onboard
+ // emit (Reolink/ONVIF/etc.) so consumers grouping by
+ // addonId / deviceId see consistent provenance. `nodeId`
+ // identifies which cluster node ran the analyzer.
+ { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId },
+ {
+ deviceId,
+ detected,
+ timestamp: frame.timestamp,
+ source: "analyzer",
+ ...detected ? { regions: result.regions } : {}
+ }
+ ));
+ }
+ }
+ if (this.ctx.eventBus) {
+ const motionPayload = {
+ detected,
+ regionCount: result.regions.length,
+ regions: result.regions.map((r) => ({
+ bbox: { x: r.bbox.x, y: r.bbox.y, w: r.bbox.w, h: r.bbox.h },
+ pixelCount: r.pixelCount,
+ intensity: r.intensity
+ })),
+ frameWidth: frame.width,
+ frameHeight: frame.height,
+ analysisMs: result.analysisMs
+ };
+ const analyzerSource = { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId };
+ this.ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.MotionAnalysis,
+ analyzerSource,
+ motionPayload
+ ));
+ const zonesPayload = {
+ deviceId,
+ timestamp: frame.timestamp,
+ zones: result.rawRegions.map((r) => ({
+ bbox: [r.bbox.x, r.bbox.y, r.bbox.x + r.bbox.w, r.bbox.y + r.bbox.h],
+ pixelCount: r.pixelCount,
+ changeScore: r.intensity / 255
+ })),
+ frameSize: { width: frame.width, height: frame.height }
+ };
+ this.ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.MotionZonesRaw,
+ analyzerSource,
+ zonesPayload
+ ));
+ }
+ runner.timingSampler.addMotionSample(deviceId, Date.now() - motionStart);
+ } catch (error) {
+ const msg = types.errMsg(error);
+ log.error("runMotionAnalysis failed", { meta: { error: msg } });
+ }
+ }
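The zones payload built above converts each raw region from an {x, y, w, h} box plus a 0–255 intensity into a corner-format box and a 0–1 changeScore. A small standalone restatement of that mapping, with assumed type names:

// Illustrative restatement of the region → zone mapping used above.
interface RawRegion {
  bbox: { x: number; y: number; w: number; h: number };
  pixelCount: number;
  intensity: number; // 0–255
}

interface Zone {
  bbox: [number, number, number, number]; // [x1, y1, x2, y2]
  pixelCount: number;
  changeScore: number; // 0–1
}

function toZone(r: RawRegion): Zone {
  return {
    bbox: [r.bbox.x, r.bbox.y, r.bbox.x + r.bbox.w, r.bbox.y + r.bbox.h],
    pixelCount: r.pixelCount,
    changeScore: r.intensity / 255
  };
}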
+ emitInferenceResult(deviceId, _frame, result) {
+ const ctx = this.ctx;
+ if (!ctx?.eventBus) return;
+ const payload = {
+ deviceId,
+ frame: result,
+ nodeId: this.nodeId
+ };
+ this.ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.PipelineInferenceResult,
+ { type: "device", id: deviceId, nodeId: this.nodeId },
+ payload
+ ));
+ }
+ /**
+ * Emit periodic metric snapshots: one runner-load event for the
+ * node + one camera-metrics event per attached camera. Subscribed
+ * by admin-ui dashboards (LiveLoadPanel, NodeDetailHeader,
+ * CameraStreamPanel) to drive live overlays without polling.
+ *
+ * The per-camera events are skipped when no cameras are attached,
+ * so quiet dev runs don't emit needless bus traffic. The runner-load
+ * event is still emitted in that case because the dashboards rely
+ * on it to see "agent reachable, idle".
+ */
+ emitMetricsSnapshot() {
+ const ctx = this.ctx;
+ const runner = this.runner;
+ if (!ctx?.eventBus || !runner) return;
+ const timestamp = Date.now();
+ void this.getLocalLoad().then((load) => {
+ if (!ctx.eventBus) return;
+ const json = JSON.stringify(load);
+ const prev = this.lastEmittedRunnerLoad;
+ const heartbeatDue = !prev || timestamp - prev.emittedAt >= METRICS_HEARTBEAT_MS;
+ if (prev && prev.json === json && !heartbeatDue) return;
+ this.lastEmittedRunnerLoad = { json, emittedAt: timestamp };
+ ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.PipelineRunnerLoadSnapshot,
+ { type: "node", id: this.nodeId, nodeId: this.nodeId },
+ { nodeId: this.nodeId, load, timestamp }
+ ));
+ }).catch(() => {
+ });
+ if (this.attached.size === 0) return;
+ for (const deviceId of this.attached.keys()) {
+ const metrics = runner.getCameraMetrics(deviceId);
+ if (!metrics) continue;
+ const json = JSON.stringify(metrics);
+ const prev = this.lastEmittedCameraMetrics.get(deviceId);
+ const heartbeatDue = !prev || timestamp - prev.emittedAt >= METRICS_HEARTBEAT_MS;
+ if (prev && prev.json === json && !heartbeatDue) continue;
+ this.lastEmittedCameraMetrics.set(deviceId, { json, emittedAt: timestamp });
+ ctx.eventBus.emit(types.createEvent(
+ types.EventCategory.PipelineCameraMetricsSnapshot,
+ { type: "device", id: deviceId, nodeId: this.nodeId },
+ { deviceId, nodeId: this.nodeId, metrics, timestamp }
+ ));
+ }
+ }
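The emit gate above (skip when the serialized snapshot is unchanged, unless the heartbeat interval has elapsed) can be isolated as a tiny helper. A sketch with an assumed heartbeat constant; the real METRICS_HEARTBEAT_MS value is not shown in this section:

// Illustrative change-or-heartbeat gate, mirroring the logic above.
// HEARTBEAT_MS stands in for METRICS_HEARTBEAT_MS.
const HEARTBEAT_MS = 30_000;

interface Emitted { json: string; emittedAt: number; }

function shouldEmit(
  prev: Emitted | undefined,
  snapshot: unknown,
  now: number
): Emitted | null {
  const json = JSON.stringify(snapshot);
  const heartbeatDue = !prev || now - prev.emittedAt >= HEARTBEAT_MS;
  // Unchanged payload and heartbeat not yet due → stay silent.
  if (prev && prev.json === json && !heartbeatDue) return null;
  // Otherwise emit and remember what was sent, and when.
  return { json, emittedAt: now };
}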
+ // ── Standard ICamstackAddon — three-level settings API (Phase 3) ─────
+ //
+ // The runner is a per-node addon with only ADDON-LEVEL settings (no
+ // per-device overrides, no cluster-wide tunables). All four tuning
+ // fields live in `getAddonSettings()`. When the UI surface moves in
+ // Phase 9 these will be rendered under Pipeline -> node -> Settings.
+ globalSettingsSchema() {
+ return this.schema({
+ sections: [
+ {
+ id: "pipeline-runner-tuning",
+ title: "Pipeline Runner",
+ tab: "scheduler",
+ description: "Per-node detection scheduler tuning. Change only if you understand the pipeline internals.",
+ columns: 2,
+ fields: [
+ {
+ type: "slider",
+ key: "maxConcurrentInferences",
+ label: "Scheduler concurrency",
+ description: 'Max parallel inferences the runner scheduler allows across all cameras on this node. Distinct from the detection-pipeline inference-pool worker count (Pipeline tab → "Worker concurrency"), which controls Python-side thread pool sizing inside a single inference job.',
+ min: 1,
+ max: 16,
+ step: 1,
+ default: DEFAULT_CONFIG.maxConcurrentInferences,
+ showValue: true
+ },
+ {
+ type: "slider",
+ key: "maxQueueDepth",
+ label: "Max queue depth",
+ description: "Maximum frames held per camera before dropping.",
+ min: 5,
+ max: 100,
+ step: 5,
+ default: DEFAULT_CONFIG.maxQueueDepth,
+ showValue: true
+ },
+ {
+ type: "slider",
+ key: "targetLoadPercent",
+ label: "Target load",
+ description: "Percentage of inference capacity to target before throttling FPS.",
+ min: 50,
+ max: 100,
+ step: 5,
+ default: DEFAULT_CONFIG.targetLoadPercent,
+ unit: "%",
+ showValue: true
+ },
+ {
+ type: "slider",
+ key: "minThrottledFps",
+ label: "Min throttled FPS",
+ description: "Lowest FPS the runner will allow when load-shedding.",
+ min: 1,
+ max: 10,
+ step: 1,
+ default: DEFAULT_CONFIG.minThrottledFps,
+ showValue: true
+ }
+ ]
+ }
+ ]
+ });
+ }
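Put together, the four addon-level tuning fields declared in the schema above amount to the following shape, which is what onConfigChanged() passes to updateLimits(). The ranges come from the slider definitions; the interface name is an assumption:

// Illustrative shape of the addon-level tuning config; ranges per the sliders above.
interface PipelineRunnerTuning {
  maxConcurrentInferences: number; // 1–16, step 1
  maxQueueDepth: number;           // 5–100, step 5 (frames held per camera)
  targetLoadPercent: number;       // 50–100, step 5 (%)
  minThrottledFps: number;         // 1–10, step 1
}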
+ async onConfigChanged() {
+ this.runner?.updateLimits(this.config);
+ this.ctx.logger.info(
+ "pipeline-runner tuning updated",
+ {
+ meta: {
+ maxQueueDepth: this.config.maxQueueDepth,
+ maxConcurrentInferences: this.config.maxConcurrentInferences,
+ targetLoadPercent: this.config.targetLoadPercent,
+ minThrottledFps: this.config.minThrottledFps
+ }
+ }
+ );
+ }
+ }
+ exports.FrameQueue = FrameQueue;
+ exports.PipelineRunner = PipelineRunner;
+ exports.PipelineTimingSampler = PipelineTimingSampler;
+ exports.Semaphore = Semaphore;
+ exports.customActions = pipelineRunnerBenchActions;
+ exports.default = PipelineRunnerAddon;
+ //# sourceMappingURL=index.js.map