@camstack/addon-pipeline 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dist/audio-analyzer/index.js +723 -0
  2. package/dist/audio-analyzer/index.js.map +1 -0
  3. package/dist/audio-analyzer/index.mjs +683 -0
  4. package/dist/audio-analyzer/index.mjs.map +1 -0
  5. package/dist/audio-codec-nodeav/index.js +467 -0
  6. package/dist/audio-codec-nodeav/index.js.map +1 -0
  7. package/dist/audio-codec-nodeav/index.mjs +467 -0
  8. package/dist/audio-codec-nodeav/index.mjs.map +1 -0
  9. package/dist/decoder-nodeav/index.js +929 -0
  10. package/dist/decoder-nodeav/index.js.map +1 -0
  11. package/dist/decoder-nodeav/index.mjs +907 -0
  12. package/dist/decoder-nodeav/index.mjs.map +1 -0
  13. package/dist/detection-pipeline/index.js +5766 -0
  14. package/dist/detection-pipeline/index.js.map +1 -0
  15. package/dist/detection-pipeline/index.mjs +5725 -0
  16. package/dist/detection-pipeline/index.mjs.map +1 -0
  17. package/dist/index-D_cl0Qqb.js +5791 -0
  18. package/dist/index-D_cl0Qqb.js.map +1 -0
  19. package/dist/index-UbcdLS7a.mjs +5790 -0
  20. package/dist/index-UbcdLS7a.mjs.map +1 -0
  21. package/dist/motion-wasm/index.js +476 -0
  22. package/dist/motion-wasm/index.js.map +1 -0
  23. package/dist/motion-wasm/index.mjs +454 -0
  24. package/dist/motion-wasm/index.mjs.map +1 -0
  25. package/dist/pipeline-runner/index.js +1669 -0
  26. package/dist/pipeline-runner/index.js.map +1 -0
  27. package/dist/pipeline-runner/index.mjs +1647 -0
  28. package/dist/pipeline-runner/index.mjs.map +1 -0
  29. package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/StreamBrokerPanel.d.ts +21 -0
  30. package/dist/stream-broker/@mf-types/compiled-types/stream-broker/widgets/index.d.ts +13 -0
  31. package/dist/stream-broker/@mf-types/widgets.d.ts +2 -0
  32. package/dist/stream-broker/@mf-types.d.ts +3 -0
  33. package/dist/stream-broker/@mf-types.zip +0 -0
  34. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_sdk__loadShare__.mjs-h5aXOPSA.mjs +12 -0
  35. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_types__loadShare__.mjs-C-URP6DW.mjs +17 -0
  36. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_camstack_mf_1_ui_mf_2_library__loadShare__.mjs-69eEmXwl.mjs +20 -0
  37. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_tanstack_mf_1_react_mf_2_query__loadShare__.mjs-U1EUeEPs.mjs +104 -0
  38. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_client__loadShare__.mjs-DeouEaSs.mjs +85 -0
  39. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare___mf_0_trpc_mf_1_react_mf_2_query__loadShare__.mjs-DHUwjbb9.mjs +62 -0
  40. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs-DePVYdid.mjs +85 -0
  41. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react__loadShare__.mjs_commonjs-proxy-CBlCGyx5.mjs +29 -0
  42. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_1_jsx_mf_2_runtime__loadShare__.mjs-gBEZsQrp.mjs +36 -0
  43. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs-DYEKzzY-.mjs +45 -0
  44. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom__loadShare__.mjs_commonjs-proxy-DZchZKbW.mjs +6 -0
  45. package/dist/stream-broker/__mfe_internal__addon_stream_broker_widgets__loadShare__react_mf_2_dom_mf_1_client__loadShare__.mjs-DICOtMTl.mjs +34 -0
  46. package/dist/stream-broker/_stub.js +752 -0
  47. package/dist/stream-broker/_virtual_mf-localSharedImportMap___mfe_internal__addon_stream_broker_widgets-D6o1e2ed.mjs +156 -0
  48. package/dist/stream-broker/client-BK73l2KT.mjs +10063 -0
  49. package/dist/stream-broker/getErrorShape-BPSzUA7W-TlK8ipWe.mjs +211 -0
  50. package/dist/stream-broker/hostInit-RCeroTVY.mjs +168 -0
  51. package/dist/stream-broker/index-BYclbfM0.mjs +15806 -0
  52. package/dist/stream-broker/index-BhXZh4lQ.mjs +1617 -0
  53. package/dist/stream-broker/index-BxHaCH3N.mjs +725 -0
  54. package/dist/stream-broker/index-D2-K2YJ7.mjs +19268 -0
  55. package/dist/stream-broker/index-IUYKHbxX.mjs +185 -0
  56. package/dist/stream-broker/index-Ss9m7Jum.mjs +2603 -0
  57. package/dist/stream-broker/index-ns1fRD30.mjs +435 -0
  58. package/dist/stream-broker/index-xncRG7-x.mjs +2713 -0
  59. package/dist/stream-broker/index.js +11171 -0
  60. package/dist/stream-broker/index.js.map +1 -0
  61. package/dist/stream-broker/index.mjs +11130 -0
  62. package/dist/stream-broker/index.mjs.map +1 -0
  63. package/dist/stream-broker/jsx-runtime-ZdY5pIZz.mjs +55 -0
  64. package/dist/stream-broker/remoteEntry.js +2973 -0
  65. package/dist/stream-broker/virtualExposes-pCd777Rp.mjs +42 -0
  66. package/package.json +258 -0
  67. package/python/__pycache__/inference_pool.cpython-313.pyc +0 -0
  68. package/python/inference_pool.py +1088 -0
  69. package/python/postprocessors/__init__.py +24 -0
  70. package/python/postprocessors/__pycache__/__init__.cpython-312.pyc +0 -0
  71. package/python/postprocessors/__pycache__/__init__.cpython-313.pyc +0 -0
  72. package/python/postprocessors/__pycache__/_safety.cpython-313.pyc +0 -0
  73. package/python/postprocessors/__pycache__/arcface.cpython-312.pyc +0 -0
  74. package/python/postprocessors/__pycache__/arcface.cpython-313.pyc +0 -0
  75. package/python/postprocessors/__pycache__/ctc.cpython-312.pyc +0 -0
  76. package/python/postprocessors/__pycache__/ctc.cpython-313.pyc +0 -0
  77. package/python/postprocessors/__pycache__/saliency.cpython-312.pyc +0 -0
  78. package/python/postprocessors/__pycache__/saliency.cpython-313.pyc +0 -0
  79. package/python/postprocessors/__pycache__/scrfd.cpython-312.pyc +0 -0
  80. package/python/postprocessors/__pycache__/scrfd.cpython-313.pyc +0 -0
  81. package/python/postprocessors/__pycache__/softmax.cpython-312.pyc +0 -0
  82. package/python/postprocessors/__pycache__/softmax.cpython-313.pyc +0 -0
  83. package/python/postprocessors/__pycache__/yamnet.cpython-312.pyc +0 -0
  84. package/python/postprocessors/__pycache__/yamnet.cpython-313.pyc +0 -0
  85. package/python/postprocessors/__pycache__/yolo.cpython-312.pyc +0 -0
  86. package/python/postprocessors/__pycache__/yolo.cpython-313.pyc +0 -0
  87. package/python/postprocessors/__pycache__/yolo_seg.cpython-312.pyc +0 -0
  88. package/python/postprocessors/__pycache__/yolo_seg.cpython-313.pyc +0 -0
  89. package/python/postprocessors/arcface.py +31 -0
  90. package/python/postprocessors/ctc.py +68 -0
  91. package/python/postprocessors/saliency.py +44 -0
  92. package/python/postprocessors/scrfd.py +212 -0
  93. package/python/postprocessors/softmax.py +43 -0
  94. package/python/postprocessors/yamnet.py +41 -0
  95. package/python/postprocessors/yolo.py +278 -0
  96. package/python/postprocessors/yolo_seg.py +247 -0
  97. package/python/requirements-coreml.txt +4 -0
  98. package/python/requirements-onnxruntime.txt +3 -0
  99. package/python/requirements-openvino.txt +3 -0
  100. package/python/requirements.txt +9 -0
  101. package/swift/audio-analyzer/apple-sound-classifier +0 -0
  102. package/swift/audio-analyzer/apple-sound-classifier.swift +213 -0
  103. package/swift/detection-pipeline/apple-sound-classifier +0 -0
  104. package/swift/detection-pipeline/apple-sound-classifier.swift +196 -0
  105. package/wasm/assembly/index.ts +290 -0
  106. package/wasm/assembly/tsconfig.json +4 -0
  107. package/wasm/motion.wasm +0 -0
@@ -0,0 +1,1647 @@
1
+ import { defineCustomActions, customAction, BaseAddon, EventCategory, pipelineRunnerCapability, errMsg, createEvent } from "@camstack/types";
2
+ import { z } from "zod";
3
/**
 * Latest-only frame buffer: holds at most one frame at a time.
 * Enqueuing while a frame is already held replaces it and counts the
 * replaced frame as dropped. `maxSize` is stored but not enforced
 * beyond the single slot (the runner treats it as an observability hint).
 */
class FrameQueue {
  latest = null;
  _droppedFrames = 0;

  constructor(maxSize) {
    this.maxSize = maxSize;
  }

  /** Store `frame`, dropping (and counting) any frame already held. */
  enqueue(frame) {
    const hadFrame = this.latest !== null;
    if (hadFrame) this._droppedFrames++;
    this.latest = frame;
  }

  /** Remove and return the held frame, or `undefined` when empty. */
  dequeue() {
    if (this.latest === null) return undefined;
    const frame = this.latest;
    this.latest = null;
    return frame;
  }

  /** 1 when a frame is held, else 0. */
  get size() {
    return this.latest === null ? 0 : 1;
  }

  /** Total frames replaced before they could be dequeued. */
  get droppedFrames() {
    return this._droppedFrames;
  }

  /** Discard any held frame without counting a drop. */
  clear() {
    this.latest = null;
  }
}
30
/**
 * Counting semaphore with runtime-resizable concurrency.
 *
 * Fix: the previous implementation kept a decremented `_available`
 * counter and capped it at 0 when `resize()` shrank the limit. That
 * lost track of the excess in-flight permits, so the very next
 * `release()` incremented `_available` and woke a waiter — meaning the
 * pool never actually drained down to the new, smaller concurrency
 * (contradicting the documented "excess will drain naturally").
 * Tracking `_inFlight` directly keeps the accounting correct across
 * resizes: permits are only granted while `_inFlight < _concurrency`.
 */
class Semaphore {
  _concurrency;
  // Permits currently handed out and not yet released.
  _inFlight = 0;
  // FIFO of grant callbacks for pending acquire() calls.
  waiters = [];

  constructor(concurrency) {
    this._concurrency = concurrency;
  }

  get concurrency() {
    return this._concurrency;
  }

  /** Permits that could be acquired right now without waiting. */
  get available() {
    return Math.max(0, this._concurrency - this._inFlight);
  }

  /**
   * Change the concurrency limit at runtime. Growing wakes as many
   * pending waiters as the new headroom allows; shrinking never revokes
   * in-flight permits — the excess drains as existing callers release,
   * and no new permits are granted until in-flight work falls below the
   * new limit.
   *
   * @throws {Error} when `newConcurrency < 1`; the semaphore is left unchanged.
   */
  resize(newConcurrency) {
    if (newConcurrency < 1) throw new Error("Semaphore: concurrency must be >= 1");
    this._concurrency = newConcurrency;
    this.#wake();
  }

  /**
   * Wait for a permit. Resolves to a release function; call it exactly
   * once when the protected work is done.
   */
  async acquire() {
    if (this._inFlight < this._concurrency) {
      this._inFlight++;
      return () => this.release();
    }
    return new Promise((resolve) => {
      this.waiters.push(() => resolve(() => this.release()));
    });
  }

  /** Return a permit and grant it to the next waiter if headroom allows. */
  release() {
    this._inFlight--;
    this.#wake();
  }

  // Grant permits to queued waiters (FIFO) while headroom exists.
  #wake() {
    while (this._inFlight < this._concurrency && this.waiters.length > 0) {
      const next = this.waiters.shift();
      this._inFlight++;
      next();
    }
  }
}
79
// Aggregate timing stats are flushed to the logger on this cadence (10 s).
const REPORT_INTERVAL_MS = 1e4;
/**
 * Accumulates per-device timing samples for the three pipeline lanes
 * (detection, motion, audio) and logs one aggregate line per device per
 * lane (avg / p95 / max, drop counts, top audio labels) every
 * REPORT_INTERVAL_MS. Buffers are cleared after each report, so every
 * log line covers exactly one interval.
 */
class PipelineTimingSampler {
  // deviceId -> detection samples ({ endToEnd, inference, detections, ... }).
  detSamples = /* @__PURE__ */ new Map();
  // deviceId -> motion-analysis durations in ms.
  motSamples = /* @__PURE__ */ new Map();
  // deviceId -> audio chunk samples ({ classifyMs, topLabel, dbfs }).
  audioSamples = /* @__PURE__ */ new Map();
  // Frames dropped since the last report; reset inside report().
  droppedFrames = 0;
  reportTimer = null;
  log = null;
  // Runtime descriptors (pipelineRuntime, pipelineModels, audioEngine)
  // echoed into every report for context. Populated externally.
  runtimeInfo = {};
  /** Attach the logger; report() is a no-op until one is set. */
  setLogger(logger) {
    this.log = logger;
  }
  /** Start the periodic report timer (idempotent). */
  start() {
    if (this.reportTimer) return;
    this.reportTimer = setInterval(() => this.report(), REPORT_INTERVAL_MS);
  }
  /** Stop the periodic report timer (idempotent). */
  stop() {
    if (this.reportTimer) {
      clearInterval(this.reportTimer);
      this.reportTimer = null;
    }
  }
  /** Record one detection timing sample for a device. */
  addSample(deviceId, s) {
    if (!this.detSamples.has(deviceId)) this.detSamples.set(deviceId, []);
    this.detSamples.get(deviceId).push(s);
  }
  /** Record one motion-analysis duration (ms) for a device. */
  addMotionSample(deviceId, ms) {
    if (!this.motSamples.has(deviceId)) this.motSamples.set(deviceId, []);
    this.motSamples.get(deviceId).push(ms);
  }
  /** Record one audio-chunk sample for a device. */
  addAudioSample(deviceId, s) {
    if (!this.audioSamples.has(deviceId)) this.audioSamples.set(deviceId, []);
    this.audioSamples.get(deviceId).push(s);
  }
  /** Count a dropped frame toward the next report. */
  addDrop() {
    this.droppedFrames++;
  }
  /**
   * Emit the aggregate log lines for the interval, then clear all
   * sample buffers. Silently returns when no logger is set.
   */
  report() {
    if (!this.log) return;
    const dropped = this.droppedFrames;
    this.droppedFrames = 0;
    // Small aggregate helpers over arrays of millisecond values.
    const avg = (arr) => arr.length > 0 ? Math.round(arr.reduce((a, b) => a + b, 0) / arr.length) : 0;
    const max = (arr) => arr.length > 0 ? Math.round(Math.max(...arr)) : 0;
    const p95 = (arr) => {
      if (arr.length === 0) return 0;
      const sorted = [...arr].sort((a, b) => a - b);
      return Math.round(sorted[Math.floor(sorted.length * 0.95)] ?? sorted[sorted.length - 1]);
    };
    const rt = this.runtimeInfo;
    for (const [deviceId, det] of this.detSamples) {
      if (det.length === 0) continue;
      const e2e = det.map((s) => s.endToEnd);
      const inf = det.map((s) => s.inference);
      const totalDet = det.reduce((s, d) => s + d.detections, 0);
      this.log.info(
        "pipeline stats",
        {
          tags: { deviceId },
          meta: {
            frames: det.length,
            intervalSec: REPORT_INTERVAL_MS / 1e3,
            e2e: { avg: avg(e2e), p95: p95(e2e), max: max(e2e) },
            inference: { avg: avg(inf), p95: p95(inf) },
            detections: totalDet,
            dropped,
            pipelineRuntime: rt.pipelineRuntime ?? null,
            pipelineModels: rt.pipelineModels ?? null
          }
        }
      );
    }
    this.detSamples.clear();
    for (const [deviceId, mot] of this.motSamples) {
      if (mot.length === 0) continue;
      this.log.info(
        "motion stats",
        {
          tags: { deviceId },
          meta: {
            frames: mot.length,
            intervalSec: REPORT_INTERVAL_MS / 1e3,
            avg: avg(mot),
            p95: p95(mot),
            max: max(mot)
            // motionAddon: rt.motionAddon ?? null,
          }
        }
      );
    }
    this.motSamples.clear();
    for (const [deviceId, aud] of this.audioSamples) {
      if (aud.length === 0) continue;
      // classifyMs of 0 marks chunks that skipped classification.
      const classifyTimes = aud.filter((a) => a.classifyMs > 0).map((a) => a.classifyMs);
      const classified = aud.filter((a) => a.topLabel !== null);
      // Frequency of each top label within the interval.
      const topLabels = /* @__PURE__ */ new Map();
      for (const a of classified) {
        if (a.topLabel) topLabels.set(a.topLabel, (topLabels.get(a.topLabel) ?? 0) + 1);
      }
      // Top 3 labels by count, e.g. "Speech×12, Music×4".
      const topSummary = [...topLabels.entries()].sort((a, b) => b[1] - a[1]).slice(0, 3).map(([l, c]) => `${l}×${c}`).join(", ");
      const avgDbfs = avg(aud.map((a) => Math.round(a.dbfs)));
      this.log.info(
        "audio stats",
        {
          tags: { deviceId },
          meta: {
            chunks: aud.length,
            intervalSec: REPORT_INTERVAL_MS / 1e3,
            classified: classified.length,
            classifyAvgMs: classifyTimes.length > 0 ? avg(classifyTimes) : 0,
            avgDbfs,
            topLabels: topSummary,
            audioEngine: rt.audioEngine ?? null
          }
        }
      );
    }
    this.audioSamples.clear();
  }
}
198
// Default cooldown (30 s) before an "active" camera drops back to "watching".
const DEFAULT_MOTION_COOLDOWN_MS = 3e4;
/**
 * Project a decoded frame onto the minimal FrameInput shape handed to the
 * processing callbacks, stripping any transport-only extras (such as the
 * `_enqueuedAt` timing stamp).
 */
function toFrameInput$1(frame) {
  const { data, width, height, format, timestamp } = frame;
  return { data, width, height, format, timestamp };
}
208
/**
 * Orchestrates per-camera frame processing: maintains a latest-only
 * motion queue and detection queue per camera, drives a motion-gated
 * phase machine (idle / watching / active), and schedules detection
 * inference through a shared Semaphore — always-on cameras first, then
 * on-motion cameras via round-robin. Results fan out to registered
 * callbacks; timings feed the PipelineTimingSampler.
 */
class PipelineRunner {
  // Config is mutable (not `readonly`) because `updateLimits()` hot-reloads
  // the four tuning fields when the pipeline-runner addon's
  // `updateAddonSettings` is invoked via the new three-level settings API.
  // The callbacks (`processFrame`, `analyzeMotion`) are invariants captured
  // at construction and never changed.
  config;
  // deviceId -> per-camera state (queues, phase, timers, metrics).
  cameras = /* @__PURE__ */ new Map();
  semaphore;
  resultCallbacks = [];
  // Device ids registered in "on-motion" mode, scheduled round-robin.
  defaultRoundRobinKeys = [];
  defaultRoundRobinIndex = 0;
  // Scheduler tick timer handle (see start()/stop()).
  intervalHandle = null;
  detectionStreamHandler = null;
  logger;
  timingSampler = new PipelineTimingSampler();
  constructor(config) {
    this.config = config;
    this.logger = config.logger;
    this.semaphore = new Semaphore(config.maxConcurrentInferences);
  }
  /**
   * Hot-reload the four tuning fields without tearing down the runner.
   * - `maxConcurrentInferences`: resized on the live semaphore; in-flight
   *   permits are preserved, new capacity is available immediately.
   * - `maxQueueDepth`: new `FrameQueue`s created from this point on use
   *   the updated ceiling. Existing per-camera queues are not resized
   *   (the FrameQueue implementation is latest-only and ignores maxSize
   *   anyway — see `frame-queue.ts` — so the field is effectively a
   *   metadata hint for observability).
   * - `targetLoadPercent` / `minThrottledFps`: stored for future
   *   throttling logic (not yet consumed in the current runner body).
   *
   * Only keys present in the patch are overwritten; unspecified keys
   * retain their current value. Any illegal combination (e.g.
   * concurrency < 1) throws and leaves the runner unchanged.
   */
  updateLimits(patch) {
    const next = {
      ...this.config,
      maxQueueDepth: patch.maxQueueDepth ?? this.config.maxQueueDepth,
      maxConcurrentInferences: patch.maxConcurrentInferences ?? this.config.maxConcurrentInferences,
      targetLoadPercent: patch.targetLoadPercent ?? this.config.targetLoadPercent,
      minThrottledFps: patch.minThrottledFps ?? this.config.minThrottledFps
    };
    // Resize before committing `next` so a resize() throw (concurrency < 1)
    // leaves this.config untouched.
    if (next.maxConcurrentInferences !== this.config.maxConcurrentInferences) {
      this.semaphore.resize(next.maxConcurrentInferences);
    }
    this.config = next;
  }
  /** Read the current tuning fields for diagnostics / tests. */
  getLimits() {
    return {
      maxQueueDepth: this.config.maxQueueDepth,
      maxConcurrentInferences: this.config.maxConcurrentInferences,
      targetLoadPercent: this.config.targetLoadPercent,
      minThrottledFps: this.config.minThrottledFps
    };
  }
  /** Set a handler called when the runner needs to subscribe/unsubscribe the detection stream. */
  onDetectionStreamChange(handler) {
    this.detectionStreamHandler = handler;
  }
  /**
   * Attach a camera. Initial phase derives from `detectionMode`:
   * disabled -> idle, always-on -> active (detection stream subscribed
   * immediately and onPhaseChanged fired), on-motion -> watching (added
   * to the round-robin set).
   */
  registerCamera(deviceId, registration) {
    const motionQueue = new FrameQueue(this.config.maxQueueDepth);
    const detectionQueue = new FrameQueue(this.config.maxQueueDepth);
    const initialPhase = registration.detectionMode === "disabled" ? "idle" : registration.detectionMode === "always-on" ? "active" : "watching";
    const state = {
      registration,
      motionQueue,
      detectionQueue,
      inferenceTimes: [],
      processedCount: 0,
      startTime: Date.now(),
      phase: initialPhase,
      motionCooldownTimer: null,
      lastArmedSource: null,
      lastArmedRegions: void 0
    };
    this.cameras.set(deviceId, state);
    if (registration.detectionMode === "on-motion") {
      this.defaultRoundRobinKeys.push(deviceId);
    }
    if (initialPhase === "active") {
      this.detectionStreamHandler?.(deviceId, "subscribe");
      const cooldownMs = registration.motionCooldownMs ?? DEFAULT_MOTION_COOLDOWN_MS;
      this.config.onPhaseChanged?.(deviceId, "active", {
        source: "analyzer",
        regions: void 0,
        timestamp: Date.now(),
        cooldownMs
      });
    }
  }
  /**
   * Detach a camera: cancel its cooldown timer, unsubscribe the
   * detection stream if it was active, drop both queues, and remove it
   * from the round-robin rotation. No-op for unknown ids.
   */
  unregisterCamera(deviceId) {
    const state = this.cameras.get(deviceId);
    if (!state) return;
    if (state.motionCooldownTimer !== null) {
      clearTimeout(state.motionCooldownTimer);
      state.motionCooldownTimer = null;
    }
    if (state.phase === "active") {
      this.detectionStreamHandler?.(deviceId, "unsubscribe");
    }
    state.motionQueue.clear();
    state.detectionQueue.clear();
    this.cameras.delete(deviceId);
    const idx = this.defaultRoundRobinKeys.indexOf(deviceId);
    if (idx !== -1) {
      this.defaultRoundRobinKeys.splice(idx, 1);
      // Keep the rotation cursor in range after removal.
      if (this.defaultRoundRobinIndex >= this.defaultRoundRobinKeys.length) {
        this.defaultRoundRobinIndex = 0;
      }
    }
  }
  /** Queue a frame for motion analysis (latest-only; replaces any pending frame). */
  enqueueMotionFrame(deviceId, frame) {
    const state = this.cameras.get(deviceId);
    if (!state) return;
    state.motionQueue.enqueue(frame);
  }
  /**
   * Queue a frame for detection inference. Silently dropped unless the
   * camera is in the `active` phase. Stamps `_enqueuedAt` for the
   * queue-wait timing split.
   */
  enqueueDetectionFrame(deviceId, frame) {
    const state = this.cameras.get(deviceId);
    if (!state) return;
    if (state.phase !== "active") return;
    frame._enqueuedAt = Date.now();
    state.detectionQueue.enqueue(frame);
  }
  /**
   * Report a motion event for a camera. Drives the unified phase
   * machine for both motion sources (analyzer + onboard):
   *
   * - Every `detected: true` (any source) clears + rearms the
   *   cooldown timer and transitions watching → active. The same
   *   timer applies regardless of which source(s) are configured;
   *   concurrent sources just keep refreshing the same window.
   * - `detected: false` is a no-op. Onboard sources never send an
   *   explicit clear, and the analyzer's "false" pulses would
   *   otherwise fight the cooldown when motion paused briefly
   *   during a scene. The timer is the single closure path.
   * - Timer expiry transitions active → watching.
   *
   * Always-on cameras silently ignore reportMotion calls — they're
   * already in `active` and have no cooldown.
   *
   * `source` and `regions` propagate into the phase-transition event
   * so the wrapping addon can attach them to the cap-state slice +
   * bus event.
   */
  reportMotion(deviceId, detected, source = "analyzer", regions = void 0) {
    const state = this.cameras.get(deviceId);
    if (!state) return;
    if (state.registration.detectionMode !== "on-motion") return;
    if (!detected) return;
    state.lastArmedSource = source;
    state.lastArmedRegions = regions;
    const cooldownMs = state.registration.motionCooldownMs ?? DEFAULT_MOTION_COOLDOWN_MS;
    if (state.motionCooldownTimer !== null) {
      clearTimeout(state.motionCooldownTimer);
      state.motionCooldownTimer = null;
    }
    if (state.phase === "watching") {
      this.transitionToActive(deviceId, state, source, regions, cooldownMs);
    }
    state.motionCooldownTimer = setTimeout(() => {
      state.motionCooldownTimer = null;
      this.transitionToWatching(deviceId, state, cooldownMs);
    }, cooldownMs);
  }
  /** Current phase for a camera, or undefined when not registered. */
  getPhase(deviceId) {
    return this.cameras.get(deviceId)?.phase;
  }
  /** Register a result callback; invoked for every non-null processFrame result. */
  onResult(callback) {
    this.resultCallbacks.push(callback);
  }
  /** Start the 10 ms scheduler tick and the timing sampler (idempotent). */
  start() {
    if (this.intervalHandle !== null) return;
    this.intervalHandle = setInterval(() => this.tick(), 10);
    this.timingSampler.start();
  }
  /** Stop the scheduler, the sampler, and all per-camera cooldown timers. */
  stop() {
    if (this.intervalHandle !== null) {
      clearInterval(this.intervalHandle);
      this.intervalHandle = null;
    }
    this.timingSampler.stop();
    for (const state of this.cameras.values()) {
      if (state.motionCooldownTimer !== null) {
        clearTimeout(state.motionCooldownTimer);
        state.motionCooldownTimer = null;
      }
    }
  }
  /** Aggregate metrics across all cameras (queue depth, avg inference ms). */
  getMetrics() {
    let totalQueueDepth = 0;
    let totalInferenceTime = 0;
    let totalInferenceCount = 0;
    for (const state of this.cameras.values()) {
      totalQueueDepth += state.motionQueue.size + state.detectionQueue.size;
      for (const t of state.inferenceTimes) {
        totalInferenceTime += t;
        totalInferenceCount++;
      }
    }
    return {
      activeCameras: this.cameras.size,
      // Throttling is not implemented yet (see updateLimits docs), so 0.
      throttledCameras: 0,
      avgInferenceTimeMs: totalInferenceCount > 0 ? totalInferenceTime / totalInferenceCount : 0,
      queueDepth: totalQueueDepth
    };
  }
  /** Per-camera metrics (fps since registration, queue depth, drops, phase). */
  getCameraMetrics(deviceId) {
    const state = this.cameras.get(deviceId);
    if (!state) return void 0;
    const elapsedMs = Date.now() - state.startTime;
    const elapsedSec = elapsedMs / 1e3;
    const actualFps = elapsedSec > 0 ? state.processedCount / elapsedSec : 0;
    const times = state.inferenceTimes;
    const avgInference = times.length > 0 ? times.reduce((a, b) => a + b, 0) / times.length : 0;
    return {
      detectionMode: state.registration.detectionMode,
      configuredFps: state.registration.fps,
      actualFps,
      queueDepth: state.motionQueue.size + state.detectionQueue.size,
      avgInferenceTimeMs: avgInference,
      droppedFrames: state.motionQueue.droppedFrames + state.detectionQueue.droppedFrames,
      phase: state.phase
    };
  }
  /** getCameraMetrics for every registered camera, tagged with deviceId. */
  getAllCameraMetrics() {
    const results = [];
    for (const [deviceId] of this.cameras) {
      const metrics = this.getCameraMetrics(deviceId);
      if (metrics) {
        results.push({ deviceId, ...metrics });
      }
    }
    return results;
  }
  /** Ids of all currently registered cameras. */
  getAttachedCameras() {
    return [...this.cameras.keys()];
  }
  // watching -> active: subscribe the detection stream and notify the addon.
  transitionToActive(deviceId, state, source, regions, cooldownMs) {
    state.phase = "active";
    this.logger?.info("motion gate opened — phase=active", {
      tags: { deviceId },
      meta: { detectionMode: state.registration.detectionMode, source }
    });
    this.detectionStreamHandler?.(deviceId, "subscribe");
    this.config.onPhaseChanged?.(deviceId, "active", {
      source,
      regions,
      timestamp: Date.now(),
      cooldownMs
    });
  }
  // active -> watching (cooldown expiry): drop queued detection frames,
  // unsubscribe the stream, notify, and reset the armed-source memo.
  transitionToWatching(deviceId, state, cooldownMs) {
    state.phase = "watching";
    state.detectionQueue.clear();
    this.logger?.info("motion gate closed — phase=watching", {
      tags: { deviceId },
      meta: { lastSource: state.lastArmedSource }
    });
    this.detectionStreamHandler?.(deviceId, "unsubscribe");
    const source = state.lastArmedSource ?? "analyzer";
    this.config.onPhaseChanged?.(deviceId, "watching", {
      source,
      regions: void 0,
      timestamp: Date.now(),
      cooldownMs
    });
    state.lastArmedSource = null;
    state.lastArmedRegions = void 0;
  }
  // One scheduler tick: always drain motion work, then dispatch at most
  // one detection frame if the semaphore has headroom.
  tick() {
    this.drainMotionQueues();
    if (this.semaphore.available <= 0) return;
    const picked = this.pickNextDetectionFrame();
    if (!picked) return;
    const { deviceId, frame, state } = picked;
    const frameInput = toFrameInput$1(frame);
    // Fire-and-forget: timing/errors are handled inside processWithSemaphore.
    void this.processWithSemaphore(deviceId, frame, frameInput, state, "detection");
  }
  // Flush every camera's motion queue into the analyzeMotion callback.
  drainMotionQueues() {
    for (const [deviceId, state] of this.cameras) {
      while (state.motionQueue.size > 0) {
        const frame = state.motionQueue.dequeue();
        if (frame) {
          void this.config.analyzeMotion(deviceId, frame);
        }
      }
    }
  }
  /**
   * Run one frame through processFrame under a semaphore permit,
   * record the inference time (last 100 samples kept per camera), fan
   * the result out to callbacks, and push a full timing-split sample
   * to the sampler. The permit is released in `finally` even when
   * processFrame throws.
   */
  async processWithSemaphore(deviceId, frame, frameInput, state, streamType) {
    const pickedAt = Date.now();
    const captureTs = frame.timestamp;
    // Frames that never went through enqueueDetectionFrame have no
    // _enqueuedAt stamp; fall back to the capture timestamp.
    const enqueuedAt = frame._enqueuedAt ?? captureTs;
    const release = await this.semaphore.acquire();
    const semAcquiredAt = Date.now();
    try {
      const result = await this.config.processFrame(deviceId, frameInput);
      const inferDoneAt = Date.now();
      const inferenceMs = inferDoneAt - semAcquiredAt;
      state.inferenceTimes.push(inferenceMs);
      if (state.inferenceTimes.length > 100) {
        state.inferenceTimes.shift();
      }
      state.processedCount++;
      if (result) {
        await this.notifyCallbacks(deviceId, frame, result, streamType);
        const emittedAt = Date.now();
        this.timingSampler.addSample(deviceId, {
          captureToEnqueue: enqueuedAt - captureTs,
          queueWait: pickedAt - enqueuedAt,
          semaphoreWait: semAcquiredAt - pickedAt,
          inference: inferenceMs,
          resultToEmit: emittedAt - inferDoneAt,
          endToEnd: emittedAt - captureTs,
          detections: result.detections?.length ?? 0
        });
      }
    } finally {
      release();
    }
  }
  // Invoke every result callback sequentially. Errors are deliberately
  // swallowed so one failing subscriber cannot block the others —
  // NOTE(review): consider at least logging them.
  async notifyCallbacks(deviceId, frame, result, streamType) {
    for (const callback of this.resultCallbacks) {
      try {
        await callback(deviceId, frame, result, streamType);
      } catch {
      }
    }
  }
  /**
   * Scheduling policy: always-on cameras take absolute priority (first
   * match in registration order); otherwise on-motion cameras that are
   * currently `active` are served round-robin, resuming after the last
   * camera served.
   */
  pickNextDetectionFrame() {
    for (const [deviceId, state] of this.cameras) {
      if (state.registration.detectionMode === "always-on" && state.detectionQueue.size > 0) {
        const frame = state.detectionQueue.dequeue();
        return { deviceId, frame, state };
      }
    }
    if (this.defaultRoundRobinKeys.length === 0) return null;
    const startIndex = this.defaultRoundRobinIndex;
    for (let i = 0; i < this.defaultRoundRobinKeys.length; i++) {
      const idx = (startIndex + i) % this.defaultRoundRobinKeys.length;
      const deviceId = this.defaultRoundRobinKeys[idx];
      if (!deviceId) continue;
      const state = this.cameras.get(deviceId);
      if (!state) continue;
      if (state.phase === "active" && state.detectionQueue.size > 0) {
        this.defaultRoundRobinIndex = (idx + 1) % this.defaultRoundRobinKeys.length;
        const frame = state.detectionQueue.dequeue();
        if (!frame) continue;
        return { deviceId, frame, state };
      }
    }
    return null;
  }
}
565
// Engine selection for a synthetic bench run: runtime, backend, model
// format, and optionally a device string.
const BenchEngineChoiceSchema = z.object({
  runtime: z.enum(["node", "python"]),
  backend: z.string(),
  format: z.enum(["onnx", "coreml", "openvino", "tflite", "pt"]),
  device: z.string().optional()
});
// One step of a bench pipeline. Recursive via `children`, hence the
// z.lazy wrapper (the schema references itself).
const BenchStepSchema = z.lazy(() => z.object({
  addonId: z.string(),
  modelId: z.string(),
  enabled: z.boolean(),
  children: z.array(BenchStepSchema).optional()
}));
// Input for caching a bench frame (base64-encoded image, optional TTL).
const CacheBenchFrameInputSchema = z.object({
  imageBase64: z.string(),
  ttlSeconds: z.number().int().positive().optional()
});
// Handle + dimensions + expiry for a cached bench frame.
const CacheBenchFrameResultSchema = z.object({
  frameId: z.string(),
  width: z.number(),
  height: z.number(),
  expiresAt: z.number()
});
// Input for releasing a cached bench frame by id.
const ReleaseBenchFrameInputSchema = z.object({
  frameId: z.string()
});
const ReleaseBenchFrameResultSchema = z.object({
  released: z.boolean()
});
// Input for a synthetic bench run against a cached frame: step tree,
// parallelism (1-32), iteration count (1-10000), optional warmup,
// session id, pipeline simulation flag, and engine override.
const RunSyntheticBenchInputSchema = z.object({
  frameId: z.string(),
  steps: z.array(BenchStepSchema).min(1),
  parallel: z.number().int().min(1).max(32),
  iterations: z.number().int().min(1).max(1e4),
  warmup: z.number().int().min(0).max(100).optional(),
  sessionId: z.string().optional(),
  simulatePipeline: z.boolean().optional(),
  engine: BenchEngineChoiceSchema.optional()
});
// Latency distribution summary (mean / p50 / p95 / p99).
const TimingSplitSchema = z.object({
  mean: z.number(),
  p50: z.number(),
  p95: z.number(),
  p99: z.number()
});
// Synthetic bench result: throughput, detection counts, timing splits,
// batching behavior, and the engine/tuning actually used.
const RunSyntheticBenchResultSchema = z.object({
  runs: z.number(),
  wallSec: z.number(),
  fps: z.number(),
  detectionsPerSec: z.number(),
  avgDetections: z.number(),
  callMs: TimingSplitSchema,
  inferMs: z.number(),
  preprocessMs: z.number(),
  predictMs: z.number(),
  batchSizeMean: z.number(),
  batchSizeMax: z.number(),
  engine: z.object({ runtime: z.string(), backend: z.string(), device: z.string().optional() }).optional(),
  tuning: z.object({ batchMode: z.string(), windowMs: z.number(), maxBatchSize: z.number(), concurrency: z.number() }).optional(),
  path: z.string().optional()
});
625
// Custom actions exposed by the pipeline-runner addon for benchmarking:
// cache a frame, run a synthetic bench against it, release the frame.
// All three are mutations (they change addon-side state).
const pipelineRunnerBenchActions = defineCustomActions({
  cacheBenchFrame: customAction(
    CacheBenchFrameInputSchema,
    CacheBenchFrameResultSchema,
    { kind: "mutation" }
  ),
  releaseBenchFrame: customAction(
    ReleaseBenchFrameInputSchema,
    ReleaseBenchFrameResultSchema,
    { kind: "mutation" }
  ),
  runSyntheticBench: customAction(
    RunSyntheticBenchInputSchema,
    RunSyntheticBenchResultSchema,
    { kind: "mutation" }
  )
});
642
// Default runner tuning. All four fields are hot-reloadable at runtime
// via `PipelineRunner.updateLimits()`.
const DEFAULT_CONFIG = {
  // Per-camera queue ceiling. Effectively an observability hint:
  // FrameQueue is latest-only and does not enforce it.
  maxQueueDepth: 30,
  // CoreML window accumulator coalesces concurrent calls into a single
  // model.predict([list]) — the more in-flight, the larger the batch and
  // the higher the per-frame throughput. With concurrency=2 the window
  // never fills past batch=2, capping the pool at ~50 fps single-node.
  // 16 matches the slider ceiling and lines up with bench numbers
  // (parallel=16 hits batch=7-8/8, sustaining ~140 fps full path).
  maxConcurrentInferences: 16,
  // Stored for future throttling logic; not consumed by the runner yet.
  targetLoadPercent: 80,
  // Stored for future throttling logic; not consumed by the runner yet.
  minThrottledFps: 1
};
654
/**
 * Narrow a decoded frame to the FrameInput contract (pixel data,
 * dimensions, pixel format, capture timestamp). Extra properties on the
 * incoming frame are intentionally not carried over.
 */
function toFrameInput(frame) {
  const input = {
    data: frame.data,
    width: frame.width,
    height: frame.height,
    format: frame.format,
    timestamp: frame.timestamp,
  };
  return input;
}
663
// Cadence (30 s) of the addon's step-log timer (`stepLogTimer`) —
// presumably a periodic pipeline-step summary; consumer not visible in
// this chunk.
const STEP_LOG_INTERVAL_MS = 3e4;
// Cadence (1 s) at which the runner fires per-camera metrics snapshots
// (see the dedupe notes on PipelineRunnerAddon.lastEmittedCameraMetrics).
const METRICS_SNAPSHOT_INTERVAL_MS = 1e3;
// Even when snapshots are unchanged (and therefore skipped), re-emit at
// least this often so consumers know the runner is still alive.
const METRICS_HEARTBEAT_MS = 3e4;
666
/**
 * Addon wrapper around PipelineRunner: wires broker frame streams,
 * motion events, the inference executor, and the synthetic bench
 * actions into one node-local detection runtime.
 */
class PipelineRunnerAddon extends BaseAddon {
  // PipelineRunner instance — created in onInitialize, stopped and
  // nulled in onShutdown.
  runner = null;
  // deviceId → { config, motionUnsubscribe, detectionUnsubscribe }.
  attached = /* @__PURE__ */ new Map();
  // Node identity used for event provenance; derived in onInitialize
  // from kernel.localNodeId (or ctx.id), truncated at the first "/".
  nodeId = "unknown";
  // Interval handle for the 30s roster dump (logAttachedSteps).
  stepLogTimer = null;
  // Interval handle for the 1s metrics snapshot (emitMetricsSnapshot).
  metricsSnapshotTimer = null;
  // Unsubscribe fn for the MotionOnMotionChanged bus subscription.
  unsubMotionEvents = null;
  /** Last analyzer-detected state per device — gates the
   * `MotionOnMotionChanged` emit in `runMotionAnalysis` to transitions
   * only (otherwise we'd emit on every analyzer frame). */
  lastAnalyzerDetected = /* @__PURE__ */ new Map();
  /**
   * Last positive motion timestamp per device — preserved across the
   * OFF transition so the motion runtime-state slice keeps a stable
   * `lastDetectedAt` after the cooldown closes the phase. Cleared on
   * detach.
   */
  lastMotionAt = /* @__PURE__ */ new Map();
  /**
   * Snapshot-equality cache for metrics-snapshot defer. The runner
   * fires per-camera metrics every `METRICS_SNAPSHOT_INTERVAL_MS`;
   * for an idle camera (no inference, queue empty, fps=0) every tick
   * carries an identical payload. We skip the bus emit when the
   * payload deep-equals the previous one so the events tab + remote
   * subscribers stop seeing 60 metrics-snapshots/min/camera that
   * convey nothing. A periodic heartbeat re-emits every
   * METRICS_HEARTBEAT_MS so consumers know the runner is still
   * alive.
   */
  lastEmittedCameraMetrics = /* @__PURE__ */ new Map();
  // Same dedupe idea for the runner-wide load payload.
  lastEmittedRunnerLoad = null;
  /**
   * In-memory bench-frame cache (decoded JPEG bytes). Populated by the
   * `cacheBenchFrame` custom action. Fed into the synthetic-bench loop
   * via the `frame: FrameInput` shape that mirrors what stream-broker
   * delivers to this very addon during real camera detection.
   */
  benchFrameCache = /* @__PURE__ */ new Map();
  // Lazily-started interval that evicts expired bench frames.
  benchFrameSweeper = null;
  constructor() {
    // Spread copy so instance-level config mutation cannot leak back
    // into the module-level DEFAULT_CONFIG object.
    super({ ...DEFAULT_CONFIG });
  }
  /**
   * Addon lifecycle entry point. Builds the PipelineRunner with this
   * addon's callbacks (inference, motion analysis, phase transitions),
   * subscribes to bus motion events, starts the periodic roster /
   * metrics timers, and returns the capability provider plus the
   * synthetic-bench custom actions.
   */
  async onInitialize() {
    // Node identity: prefer the kernel's node id, fall back to the
    // addon ctx id; keep only the part before the first "/".
    const raw = this.ctx.kernel.localNodeId ?? this.ctx.id;
    this.nodeId = raw.includes("/") ? raw.split("/")[0] : raw;
    this.runner = new PipelineRunner({
      maxQueueDepth: this.config.maxQueueDepth,
      maxConcurrentInferences: this.config.maxConcurrentInferences,
      targetLoadPercent: this.config.targetLoadPercent,
      minThrottledFps: this.config.minThrottledFps,
      // Runner → addon callbacks: detection frames, motion frames, and
      // watching↔active phase transitions all land back here.
      processFrame: (deviceId, frame) => this.runInference(deviceId, frame),
      analyzeMotion: (deviceId, frame) => this.runMotionAnalysis(deviceId, frame),
      onPhaseChanged: (deviceId, phase, meta) => this.handlePhaseChanged(deviceId, phase, meta),
      logger: this.ctx.logger
    });
    this.runner.timingSampler.setLogger(this.ctx.logger.child("timing"));
    // Runner asks us to (un)subscribe the detection frame stream as its
    // phase machine toggles.
    this.runner.onDetectionStreamChange((deviceId, action) => {
      this.handleDetectionStreamChange(deviceId, action);
    });
    this.runner.onResult(async (deviceId, frame, result, _streamType) => {
      this.emitInferenceResult(deviceId, frame, result);
    });
    this.runner.start();
    this.ctx.logger.info(
      "Pipeline runner started",
      {
        tags: { nodeId: this.nodeId },
        meta: {
          maxConcurrent: this.config.maxConcurrentInferences,
          queueDepth: this.config.maxQueueDepth
        }
      }
    );
    if (this.ctx.eventBus) {
      // Bridge external (onboard/ONVIF/etc.) motion events into the
      // runner — only for devices attached here and only when the
      // event's source is in the camera's configured motionSources.
      this.unsubMotionEvents = this.ctx.eventBus.subscribe(
        { category: EventCategory.MotionOnMotionChanged },
        (event) => {
          const data = event.data;
          const deviceId = data.deviceId;
          const attachment = this.attached.get(deviceId);
          if (!attachment) return;
          const source = data.source;
          if (!attachment.config.motionSources.includes(source)) return;
          this.runner?.reportMotion(
            deviceId,
            data.detected,
            source,
            // Copy the regions array so the runner owns a mutable list.
            data.regions ? [...data.regions] : void 0
          );
        }
      );
    }
    this.stepLogTimer = setInterval(() => this.logAttachedSteps(), STEP_LOG_INTERVAL_MS);
    this.metricsSnapshotTimer = setInterval(
      () => this.emitMetricsSnapshot(),
      METRICS_SNAPSHOT_INTERVAL_MS
    );
    return {
      providers: [{ capability: pipelineRunnerCapability, provider: this }],
      customActions: pipelineRunnerBenchActions,
      actionHandlers: {
        cacheBenchFrame: async (input) => this.cacheBenchFrame(input),
        releaseBenchFrame: async (input) => this.releaseBenchFrame(input),
        runSyntheticBench: async (input) => this.runSyntheticBench(input)
      }
    };
  }
+ async onShutdown() {
774
+ if (this.metricsSnapshotTimer) {
775
+ clearInterval(this.metricsSnapshotTimer);
776
+ this.metricsSnapshotTimer = null;
777
+ }
778
+ if (this.stepLogTimer) {
779
+ clearInterval(this.stepLogTimer);
780
+ this.stepLogTimer = null;
781
+ }
782
+ if (this.benchFrameSweeper) {
783
+ clearInterval(this.benchFrameSweeper);
784
+ this.benchFrameSweeper = null;
785
+ }
786
+ this.benchFrameCache.clear();
787
+ if (this.unsubMotionEvents) {
788
+ this.unsubMotionEvents();
789
+ this.unsubMotionEvents = null;
790
+ }
791
+ this.lastAnalyzerDetected.clear();
792
+ if (this.runner) {
793
+ this.runner.stop();
794
+ this.runner = null;
795
+ }
796
+ for (const attachment of this.attached.values()) {
797
+ attachment.motionUnsubscribe?.();
798
+ attachment.detectionUnsubscribe?.();
799
+ }
800
+ this.attached.clear();
801
+ }
802
+ // ── Synthetic bench (production-equivalent measurement) ───────────────
803
+ async cacheBenchFrame(input) {
804
+ const sharp = (await import("sharp")).default;
805
+ const jpeg = Buffer.from(input.imageBase64, "base64");
806
+ const { data, info } = await sharp(jpeg).raw().toBuffer({ resolveWithObject: true });
807
+ if (info.channels !== 3) {
808
+ throw new Error(`cacheBenchFrame: expected 3 channels (rgb), got ${info.channels}`);
809
+ }
810
+ const rgb = new Uint8Array(data);
811
+ const ttlMs = Math.max(6e4, (input.ttlSeconds ?? 600) * 1e3);
812
+ const frameId = `runner-bench-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`;
813
+ const expiresAt = Date.now() + ttlMs;
814
+ this.benchFrameCache.set(frameId, { data: rgb, width: info.width, height: info.height, format: "rgb", expiresAt });
815
+ if (!this.benchFrameSweeper) {
816
+ this.benchFrameSweeper = setInterval(() => this.sweepBenchFrameCache(), 6e4);
817
+ this.benchFrameSweeper.unref?.();
818
+ }
819
+ this.ctx.logger.info("cached bench frame", {
820
+ meta: { frameId, width: info.width, height: info.height, bytes: rgb.length, ttlMs }
821
+ });
822
+ return { frameId, width: info.width, height: info.height, expiresAt };
823
+ }
824
+ async releaseBenchFrame(input) {
825
+ return { released: this.benchFrameCache.delete(input.frameId) };
826
+ }
827
+ sweepBenchFrameCache() {
828
+ const now = Date.now();
829
+ for (const [id, entry] of this.benchFrameCache) {
830
+ if (entry.expiresAt < now) this.benchFrameCache.delete(id);
831
+ }
832
+ }
833
  /**
   * Run a synthetic benchmark against a previously cached bench frame
   * (`cacheBenchFrame`). Two paths:
   *  - "cached" fast path: exactly one enabled root step with no enabled
   *    children and `simulatePipeline` unset — the frame is cached in
   *    the executor's pool once and `inferCached` is hammered directly;
   *  - "pipeline" path: the full `runPipeline` call per iteration.
   * Both run `parallel` workers × `iterations` calls after a warmup,
   * emit `PipelineProgress` bus events as they go, and return the
   * aggregate via `buildBenchResult`.
   */
  async runSyntheticBench(input) {
    const ctx = this.ctx;
    const api = ctx.api;
    if (!api) throw new Error("runSyntheticBench: ctx.api unavailable");
    ctx.logger.info("runSyntheticBench input", {
      meta: { frameId: input.frameId, parallel: input.parallel, iterations: input.iterations }
    });
    const cached = this.benchFrameCache.get(input.frameId);
    if (!cached) {
      throw new Error(`runSyntheticBench: frameId ${input.frameId} not cached (call cacheBenchFrame first)`);
    }
    // Normalize the step tree (children default to []).
    const stepsToRun = input.steps.map((s) => ({
      addonId: s.addonId,
      modelId: s.modelId,
      enabled: s.enabled,
      children: s.children ?? []
    }));
    const enabledSteps = stepsToRun.filter((s) => s.enabled);
    // Fast path is only valid for a single enabled step with no enabled children.
    const isSingleStep = enabledSteps.length === 1 && (!enabledSteps[0].children || enabledSteps[0].children.filter((c) => c.enabled).length === 0);
    const useFastPath = isSingleStep && !input.simulatePipeline;
    const rootStep = enabledSteps[0];
    const sharedFrame = {
      data: cached.data,
      format: cached.format,
      width: cached.width,
      height: cached.height,
      timestamp: Date.now()
    };
    let poolFrameId = null;
    if (useFastPath && rootStep) {
      ctx.logger.info("synthetic bench: using Python cache path", {
        meta: { step: rootStep.addonId, model: rootStep.modelId }
      });
      // Copy the pixel buffer before handing it to the executor pool.
      const cacheResult = await api.pipelineExecutor.cacheFrameInPool.mutate({
        data: new Uint8Array(cached.data.slice().buffer),
        width: cached.width,
        height: cached.height,
        format: cached.format
      });
      poolFrameId = cacheResult.frameId;
      // One full runPipeline call first (model load / first-call setup).
      await api.pipelineExecutor.runPipeline.mutate({
        steps: stepsToRun,
        frame: sharedFrame,
        ...input.engine ? { engine: input.engine } : {}
      });
      // Untimed warmup calls (default 1).
      const warmupCount2 = input.warmup ?? 1;
      for (let w = 0; w < warmupCount2; w++) {
        await api.pipelineExecutor.inferCached.mutate({
          stepId: rootStep.addonId,
          frameId: poolFrameId
        });
      }
      // Per-call sample arrays, shared by all workers.
      const wallTimings2 = [];
      const inferTimings2 = [];
      const preprocessTimings2 = [];
      const predictTimings2 = [];
      const batchSizes2 = [];
      const detCounts2 = [];
      let _n = 0;
      const sessionId2 = input.sessionId ?? `synth-${Date.now().toString(36)}`;
      const totalRuns2 = input.parallel * input.iterations;
      const wallStart2 = performance.now();
      const worker2 = async () => {
        for (let i = 0; i < input.iterations; i++) {
          const t0 = performance.now();
          const result = await api.pipelineExecutor.inferCached.mutate({
            stepId: rootStep.addonId,
            frameId: poolFrameId
          });
          const wallMs = performance.now() - t0;
          const r = result;
          // Server timing fields are optional; fall back conservatively.
          const inferMs = typeof r["inferenceMs"] === "number" ? r["inferenceMs"] : wallMs;
          const preMs = typeof r["preprocessMs"] === "number" ? r["preprocessMs"] : 0;
          const predMs = typeof r["predictMs"] === "number" ? r["predictMs"] : 0;
          const bs = typeof r["batchSize"] === "number" ? r["batchSize"] : 1;
          const dets = Array.isArray(r["detections"]) ? r["detections"].length : 0;
          wallTimings2.push(wallMs);
          inferTimings2.push(inferMs);
          preprocessTimings2.push(preMs);
          predictTimings2.push(predMs);
          batchSizes2.push(bs);
          detCounts2.push(dets);
          const n = ++_n;
          // Trace the first 20 calls at full detail.
          if (n <= 20) {
            ctx.logger.info("bench call trace (cached)", {
              meta: { n, wallMs: Math.round(wallMs), inferMs: Math.round(inferMs), preMs: Math.round(preMs * 10) / 10, predMs: Math.round(predMs * 10) / 10, bs }
            });
          }
          // Emit progress roughly once per "round" of parallel workers.
          if (n % Math.max(1, input.parallel) === 0) {
            const elapsed = (performance.now() - wallStart2) / 1e3;
            const fps = elapsed > 0 ? n / elapsed : 0;
            const meanCallMs = wallTimings2.reduce((s, v) => s + v, 0) / wallTimings2.length;
            const sorted = [...wallTimings2].sort((a, b) => a - b);
            const p95 = sorted[Math.min(sorted.length - 1, Math.floor(0.95 * sorted.length))] ?? 0;
            const totalDet = detCounts2.reduce((s, v) => s + v, 0);
            const avgDet = detCounts2.length > 0 ? totalDet / detCounts2.length : 0;
            const bsMean = batchSizes2.reduce((s, v) => s + v, 0) / batchSizes2.length;
            const msg = `runs ${n}/${totalRuns2} · ${fps.toFixed(1)} fps · call ${meanCallMs.toFixed(1)}ms · batch ${bsMean.toFixed(1)}`;
            if (ctx.eventBus) {
              ctx.eventBus.emit({
                id: `bench-${n}`,
                timestamp: /* @__PURE__ */ new Date(),
                source: { type: "pipeline", id: "synthetic-bench" },
                category: EventCategory.PipelineProgress,
                data: {
                  nodeId: "hub",
                  sessionId: sessionId2,
                  step: "synthetic-bench",
                  message: msg,
                  benchProgress: true,
                  runs: n,
                  totalRuns: totalRuns2,
                  fps: Math.round(fps * 100) / 100,
                  meanMs: Math.round(meanCallMs * 100) / 100,
                  p95Ms: Math.round(p95 * 100) / 100,
                  inferMeanMs: Math.round(inferTimings2.reduce((s, v) => s + v, 0) / inferTimings2.length * 100) / 100,
                  preprocessMeanMs: Math.round(preprocessTimings2.reduce((s, v) => s + v, 0) / preprocessTimings2.length * 100) / 100,
                  predictMeanMs: Math.round(predictTimings2.reduce((s, v) => s + v, 0) / predictTimings2.length * 100) / 100,
                  batchSizeMean: Math.round(bsMean * 100) / 100,
                  detPerSec: elapsed > 0 ? Math.round(totalDet / elapsed * 100) / 100 : 0,
                  avgDetections: Math.round(avgDet * 100) / 100
                }
              });
            } else {
              ctx.logger.warn("emitProgress: NO eventBus");
            }
          }
        }
      };
      await Promise.all(Array.from({ length: input.parallel }, () => worker2()));
      const wallSec2 = (performance.now() - wallStart2) / 1e3;
      // Best-effort pool cleanup; never fail the bench over it.
      await api.pipelineExecutor.uncacheFrame.mutate({ frameId: poolFrameId }).catch(() => {
      });
      return this.buildBenchResult(wallTimings2, inferTimings2, preprocessTimings2, predictTimings2, batchSizes2, detCounts2, wallSec2, "cached");
    }
    ctx.logger.info("synthetic bench: using full runPipeline path", {
      meta: { steps: enabledSteps.length, simulatePipeline: !!input.simulatePipeline }
    });
    let _callCount = 0;
    // Single timed runPipeline call; traces the first 20 calls.
    const callOnce = async () => {
      const t0 = performance.now();
      const result = await api.pipelineExecutor.runPipeline.mutate({
        steps: stepsToRun,
        frame: sharedFrame,
        ...input.engine ? { engine: input.engine } : {}
      });
      const wallMs = performance.now() - t0;
      const n = ++_callCount;
      if (n <= 20) {
        ctx.logger.info("bench call trace", {
          meta: {
            n,
            wallMs: Math.round(wallMs),
            totalInferenceMs: Math.round(result.debug?.totalInferenceMs ?? 0),
            predictMs: Math.round((result.debug?.predictMs ?? 0) * 10) / 10,
            preprocessMs: Math.round((result.debug?.preprocessMs ?? 0) * 10) / 10,
            batchSize: result.debug?.batchSize ?? 1
          }
        });
      }
      return { wallMs, result };
    };
    // Untimed warmup (default 1).
    const warmupCount = input.warmup ?? 1;
    for (let i = 0; i < warmupCount; i++) {
      await callOnce();
    }
    const wallTimings = [];
    const serverWallTimings = [];
    const inferTimings = [];
    const preprocessTimings = [];
    const predictTimings = [];
    const batchSizes = [];
    const detCounts = [];
    const sessionId = input.sessionId ?? `synth-${Date.now().toString(36)}`;
    const totalRuns = input.parallel * input.iterations;
    const wallStart = performance.now();
    const worker = async () => {
      for (let i = 0; i < input.iterations; i++) {
        const { wallMs, result } = await callOnce();
        wallTimings.push(wallMs);
        serverWallTimings.push(result.debug?.wallMs ?? 0);
        inferTimings.push(result.debug?.totalInferenceMs ?? 0);
        preprocessTimings.push(result.debug?.preprocessMs ?? 0);
        predictTimings.push(result.debug?.predictMs ?? 0);
        batchSizes.push(result.debug?.batchSize ?? 1);
        detCounts.push(result.detections?.length ?? 0);
        const n = wallTimings.length;
        // Progress emit once per parallel round, only when a bus exists.
        if (n % Math.max(1, input.parallel) === 0 && ctx.eventBus) {
          const elapsed = (performance.now() - wallStart) / 1e3;
          const fps = elapsed > 0 ? n / elapsed : 0;
          const meanMs = wallTimings.reduce((s, v) => s + v, 0) / n;
          const sorted = [...wallTimings].sort((a, b) => a - b);
          const p95 = sorted[Math.min(sorted.length - 1, Math.floor(0.95 * sorted.length))] ?? 0;
          const totalDet = detCounts.reduce((s, v) => s + v, 0);
          const bsMean = batchSizes.reduce((s, v) => s + v, 0) / n;
          ctx.eventBus.emit({
            id: `bench-${n}`,
            timestamp: /* @__PURE__ */ new Date(),
            source: { type: "pipeline", id: "synthetic-bench" },
            category: EventCategory.PipelineProgress,
            data: {
              nodeId: "hub",
              sessionId,
              step: "synthetic-bench",
              message: `runs ${n}/${totalRuns} · ${fps.toFixed(1)} fps · call ${meanMs.toFixed(1)}ms · batch ${bsMean.toFixed(1)}`,
              benchProgress: true,
              runs: n,
              totalRuns,
              fps: Math.round(fps * 100) / 100,
              meanMs: Math.round(meanMs * 100) / 100,
              p95Ms: Math.round(p95 * 100) / 100,
              inferMeanMs: Math.round(inferTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
              preprocessMeanMs: Math.round(preprocessTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
              predictMeanMs: Math.round(predictTimings.reduce((s, v) => s + v, 0) / n * 100) / 100,
              batchSizeMean: Math.round(bsMean * 100) / 100,
              detPerSec: elapsed > 0 ? Math.round(totalDet / elapsed * 100) / 100 : 0,
              avgDetections: n > 0 ? Math.round(totalDet / n * 100) / 100 : 0
            }
          });
        }
      }
    };
    await Promise.all(Array.from({ length: input.parallel }, () => worker()));
    const wallSec = (performance.now() - wallStart) / 1e3;
    return this.buildBenchResult(wallTimings, inferTimings, preprocessTimings, predictTimings, batchSizes, detCounts, wallSec, "pipeline");
  }
+ async buildBenchResult(wallTimings, inferTimings, preprocessTimings, predictTimings, batchSizes, detCounts, wallSec, path) {
1060
+ const meanOfArr = (xs) => xs.length > 0 ? xs.reduce((s, v) => s + v, 0) / xs.length : 0;
1061
+ this.ctx.logger.info("synthetic bench summary", {
1062
+ meta: {
1063
+ runs: wallTimings.length,
1064
+ wallSec: Math.round(wallSec * 100) / 100,
1065
+ fps: Math.round(wallTimings.length / wallSec * 100) / 100,
1066
+ callMeanMs: Math.round(meanOfArr(wallTimings)),
1067
+ inferMeanMs: Math.round(meanOfArr(inferTimings)),
1068
+ preprocessMeanMs: Math.round(meanOfArr(preprocessTimings)),
1069
+ predictMeanMs: Math.round(meanOfArr(predictTimings)),
1070
+ batchSizeMean: Math.round(meanOfArr(batchSizes) * 100) / 100,
1071
+ batchSizeMax: batchSizes.length > 0 ? Math.max(...batchSizes) : 0
1072
+ }
1073
+ });
1074
+ const sorted = [...wallTimings].sort((a, b) => a - b);
1075
+ const pick = (q) => sorted.length > 0 ? sorted[Math.min(sorted.length - 1, Math.floor(q * sorted.length))] : 0;
1076
+ const meanOf = (xs) => xs.length > 0 ? xs.reduce((s, v) => s + v, 0) / xs.length : 0;
1077
+ const totalRuns = wallTimings.length;
1078
+ const totalDet = detCounts.reduce((s, v) => s + v, 0);
1079
+ return {
1080
+ runs: totalRuns,
1081
+ wallSec: Math.round(wallSec * 1e3) / 1e3,
1082
+ fps: wallSec > 0 ? Math.round(totalRuns / wallSec * 100) / 100 : 0,
1083
+ detectionsPerSec: wallSec > 0 ? Math.round(totalDet / wallSec * 100) / 100 : 0,
1084
+ avgDetections: totalRuns > 0 ? Math.round(totalDet / totalRuns * 100) / 100 : 0,
1085
+ callMs: {
1086
+ mean: Math.round(meanOf(wallTimings) * 100) / 100,
1087
+ p50: Math.round(pick(0.5) * 100) / 100,
1088
+ p95: Math.round(pick(0.95) * 100) / 100,
1089
+ p99: Math.round(pick(0.99) * 100) / 100
1090
+ },
1091
+ inferMs: Math.round(meanOf(inferTimings) * 100) / 100,
1092
+ preprocessMs: Math.round(meanOf(preprocessTimings) * 100) / 100,
1093
+ predictMs: Math.round(meanOf(predictTimings) * 100) / 100,
1094
+ batchSizeMean: Math.round(meanOf(batchSizes) * 100) / 100,
1095
+ batchSizeMax: batchSizes.length > 0 ? Math.max(...batchSizes) : 0,
1096
+ path,
1097
+ ...await this.getEngineAndTuning()
1098
+ };
1099
+ }
1100
+ async getEngineAndTuning() {
1101
+ try {
1102
+ const api = this.ctx.api;
1103
+ if (!api) return {};
1104
+ const [eng, tuning] = await Promise.all([
1105
+ api.pipelineExecutor.getSelectedEngine.query(),
1106
+ api.pipelineExecutor.getEffectiveTuning.query()
1107
+ ]);
1108
+ return {
1109
+ engine: eng ? { runtime: eng.runtime, backend: eng.backend, device: eng.device } : void 0,
1110
+ tuning: tuning ?? void 0
1111
+ };
1112
+ } catch {
1113
+ return {};
1114
+ }
1115
+ }
1116
  // ── IPipelineRunnerProvider implementation ────────────────────────────
  /**
   * Attach a camera to the runner: register it, record the attachment,
   * and (when the "analyzer" motion source is configured) subscribe the
   * motion frame stream. A re-attach of an already-attached device is
   * handled as detach-then-attach. Returns `{ success: true }`.
   */
  async attachCamera(config) {
    const runner = this.runner;
    const ctx = this.ctx;
    if (!runner || !ctx) {
      throw new Error("PipelineRunnerAddon: attachCamera called before initialize completed");
    }
    // Verbose dump of the incoming config for debugging attach issues.
    this.ctx.logger.info("attachCamera received config", {
      tags: { deviceId: config.deviceId },
      meta: {
        motionSources: config.motionSources,
        motionSourcesType: Array.isArray(config.motionSources) ? `array(${config.motionSources.length})` : typeof config.motionSources,
        motionStreamId: config.motionStreamId,
        detectionStreamId: config.detectionStreamId,
        keys: Object.keys(config)
      }
    });
    // Re-attach: tear the old attachment down first.
    if (this.attached.has(config.deviceId)) {
      this.detachInternal(config.deviceId);
    }
    runner.registerCamera(config.deviceId, {
      detectionMode: config.detectionMode,
      fps: config.detectionFps,
      motionCooldownMs: config.motionCooldownMs
    });
    const attachment = {
      config,
      motionUnsubscribe: null,
      detectionUnsubscribe: null
    };
    this.attached.set(config.deviceId, attachment);
    // Analyzer-based motion needs the broker's motion frame stream;
    // other sources (onboard etc.) arrive via bus events instead.
    if (config.motionSources.includes("analyzer")) {
      attachment.motionUnsubscribe = await this.subscribeMotionFrames(config);
    }
    // Human-readable dispatch/engine labels for the attach log line.
    const stepsCount = config.steps?.length ?? 0;
    const dispatch = stepsCount > 0 ? `runPipeline(${stepsCount}step${stepsCount === 1 ? "" : "s"})` : config.steps !== void 0 ? "skip(0steps)" : "runFrame(legacy)";
    const engineLabel = config.engine ? `${config.engine.runtime}+${config.engine.backend}/${config.engine.format}` : "default";
    this.ctx.logger.info(
      "attachCamera",
      {
        tags: { deviceId: config.deviceId },
        meta: {
          detectionMode: config.detectionMode,
          audioMode: config.audioMode,
          motionFps: config.motionFps,
          detectionFps: config.detectionFps,
          motionSources: config.motionSources,
          dispatch,
          engine: engineLabel
        }
      }
    );
    return { success: true };
  }
+ async detachCamera(input) {
1171
+ this.detachInternal(input.deviceId);
1172
+ return { success: true };
1173
+ }
1174
+ async reportMotion(input) {
1175
+ this.runner?.reportMotion(input.deviceId, input.detected, input.source, input.regions);
1176
+ return { success: true };
1177
+ }
1178
  /**
   * Periodic per-camera step roster dump. Once every
   * STEP_LOG_INTERVAL_MS (30s) emits one log line per attached camera
   * with the configured detection step tree + audio classifier branch
   * so an operator looking at the agent log can quickly see what each
   * camera is currently running without crossing tRPC. Skips when no
   * cameras are attached so quiet dev runs stay silent.
   */
  logAttachedSteps() {
    if (this.attached.size === 0) return;
    for (const [deviceId, attachment] of this.attached) {
      const cfg = attachment.config;
      // Enabled steps only, flattened preorder; "<none>" when the
      // camera has no video pipeline configured.
      const detectionSteps = cfg.steps && cfg.steps.length > 0 ? this.flattenSteps(cfg.steps).filter((s) => s.enabled) : [];
      const detectionLabel = detectionSteps.length > 0 ? detectionSteps.map((s) => `${s.addonId}/${s.modelId}`).join(" → ") : "<none>";
      const audioLabel = cfg.audio && cfg.audio.enabled ? `${cfg.audio.engine.runtime}/${cfg.audio.engine.backend}/${cfg.audio.modelId}` : "<off>";
      const engineLabel = cfg.engine ? `${cfg.engine.runtime}/${cfg.engine.backend}${cfg.engine.device ? `/${cfg.engine.device}` : ""}` : "<unset>";
      this.ctx.logger.info("Camera pipeline roster", {
        tags: { deviceId },
        meta: {
          phase: "roster",
          intervalSec: STEP_LOG_INTERVAL_MS / 1e3,
          pipelineEnabled: cfg.pipelineEnabled,
          motionSources: cfg.motionSources,
          motionFps: cfg.motionFps,
          detectionFps: cfg.detectionFps,
          engine: engineLabel,
          videoSteps: detectionLabel,
          videoStepCount: detectionSteps.length,
          audio: audioLabel
        }
      });
    }
  }
+ /** Recursively flatten the step tree → ordered list of every node. */
1212
+ flattenSteps(steps) {
1213
+ const out = [];
1214
+ const walk = (s) => {
1215
+ out.push(s);
1216
+ if (s.children) {
1217
+ for (const c of s.children) walk(c);
1218
+ }
1219
+ };
1220
+ for (const s of steps) walk(s);
1221
+ return out;
1222
+ }
1223
+ detachInternal(deviceId) {
1224
+ const attachment = this.attached.get(deviceId);
1225
+ if (!attachment) return;
1226
+ attachment.motionUnsubscribe?.();
1227
+ attachment.detectionUnsubscribe?.();
1228
+ this.attached.delete(deviceId);
1229
+ this.lastMotionAt.delete(deviceId);
1230
+ this.lastEmittedCameraMetrics.delete(deviceId);
1231
+ this.runner?.unregisterCamera(deviceId);
1232
+ this.ctx?.logger.info("detachCamera", { tags: { deviceId } });
1233
+ }
1234
+ async getLocalLoad() {
1235
+ const metrics = this.runner?.getMetrics() ?? { avgInferenceTimeMs: 0, queueDepth: 0 };
1236
+ const allCameraMetrics = this.runner?.getAllCameraMetrics() ?? [];
1237
+ let activeCameras = 0;
1238
+ let totalActualFps = 0;
1239
+ for (const cm of allCameraMetrics) {
1240
+ if (cm.phase === "active") activeCameras++;
1241
+ totalActualFps += cm.actualFps;
1242
+ }
1243
+ return {
1244
+ nodeId: this.nodeId,
1245
+ attachedCameras: this.attached.size,
1246
+ activeCameras,
1247
+ avgInferenceFps: totalActualFps,
1248
+ avgInferenceTimeMs: metrics.avgInferenceTimeMs,
1249
+ queueDepthTotal: metrics.queueDepth,
1250
+ hardware: {
1251
+ hasGpu: false,
1252
+ inferenceBackend: void 0
1253
+ }
1254
+ };
1255
+ }
1256
+ async getLocalMetrics() {
1257
+ const m = this.runner?.getMetrics() ?? { activeCameras: 0, throttledCameras: 0, avgInferenceTimeMs: 0, queueDepth: 0 };
1258
+ return { nodeId: this.nodeId, ...m };
1259
+ }
1260
+ async getCameraMetrics(input) {
1261
+ return this.runner?.getCameraMetrics(input.deviceId) ?? null;
1262
+ }
1263
+ getAllCameraMetrics() {
1264
+ return this.runner?.getAllCameraMetrics() ?? [];
1265
+ }
1266
+ getLocalCameras() {
1267
+ return [...this.attached.keys()];
1268
+ }
1269
  // ── Internal: broker subscription wiring ─────────────────────────────
  /**
   * Subscribe the device's motion stream from the stream broker and feed
   * decoded grayscale frames into the runner, rate-limited to the
   * configured motion fps. Returns the broker's unsubscribe function,
   * or null when the api/broker is unavailable (logged as a warning).
   */
  async subscribeMotionFrames(config) {
    const ctx = this.ctx;
    const runner = this.runner;
    if (!ctx || !runner) return null;
    const log = this.ctx.logger.withTags({ deviceId: config.deviceId });
    const api = this.ctx.api;
    if (!api) {
      log.warn("subscribeMotionFrames: this.ctx.api not available");
      return null;
    }
    // Broker ids are "<deviceId>/<streamId>".
    const motionBrokerId = `${config.deviceId}/${config.motionStreamId}`;
    const motionBroker = await api.streamBroker.getBroker.query({ brokerId: motionBrokerId });
    if (!motionBroker) {
      log.warn("subscribeMotionFrames: no broker found", { meta: { brokerId: motionBrokerId } });
      return null;
    }
    return motionBroker.onDecodedFrame(
      (frame) => {
        runner.enqueueMotionFrame(config.deviceId, frame);
      },
      // Grayscale is sufficient for the motion analyzer.
      { maxFps: config.motionFps, format: "gray", tag: "motion" }
    );
  }
+ handleDetectionStreamChange(deviceId, action) {
1294
+ const attachment = this.attached.get(deviceId);
1295
+ if (!attachment) return;
1296
+ if (action === "subscribe") {
1297
+ void this.subscribeDetectionFrames(attachment.config).then((unsub) => {
1298
+ attachment.detectionUnsubscribe = unsub;
1299
+ });
1300
+ } else {
1301
+ attachment.detectionUnsubscribe?.();
1302
+ attachment.detectionUnsubscribe = null;
1303
+ }
1304
+ }
1305
  /**
   * Bridge runner phase transitions to the device's `motion` runtime
   * state + the bus. Single ownership point — every motion source
   * (analyzer, onboard, future variants) funnels through the runner's
   * phase machine and lands here.
   *
   * - Cap-state via the unified `device-state.setCapSlice` API.
   *   `autoClearAfterMs = cooldownMs` on ON, `null` on OFF.
   *   `lastDetectedAt` is preserved across OFF using `lastMotionAt`.
   * - Bus event `MotionOnMotionChanged` fires alongside for consumers
   *   that prefer event-driven over runtime-state polling.
   */
  handlePhaseChanged(deviceId, phase, meta) {
    const detected = phase === "active";
    // Remember the last ON timestamp so OFF still reports it.
    if (detected) this.lastMotionAt.set(deviceId, meta.timestamp);
    const lastDetectedAt = this.lastMotionAt.get(deviceId) ?? null;
    const slice = {
      detected,
      lastDetectedAt,
      autoClearAfterMs: detected ? meta.cooldownMs : null
    };
    // Fire-and-forget cap-state write; failures only logged at debug.
    void (async () => {
      const dev = await this.ctx.fetchDevice(deviceId);
      await dev.deviceState.setCapSlice({ capName: "motion", slice });
    })().catch((err) => {
      this.ctx.logger.debug("motion cap-state write failed", {
        tags: { deviceId },
        meta: { error: errMsg(err) }
      });
    });
    if (this.ctx.eventBus) {
      // Transition labels are derived from the new phase only.
      const from = detected ? "watching" : "active";
      const to = detected ? "active" : "watching";
      const reason = detected ? "motion_detected" : "cooldown_expired";
      const payload = {
        deviceId,
        from,
        to,
        reason,
        source: meta.source,
        cooldownMs: meta.cooldownMs,
        timestamp: meta.timestamp
      };
      this.ctx.eventBus.emit(createEvent(
        EventCategory.DetectionPhaseTransition,
        { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId },
        payload
      ));
    }
  }
  /**
   * Subscribe the device's detection stream from the stream broker and
   * feed decoded RGB frames into the runner, rate-limited to the
   * configured detection fps. Returns the broker's unsubscribe
   * function, or null when the api/broker is unavailable (logged).
   */
  async subscribeDetectionFrames(config) {
    const ctx = this.ctx;
    const runner = this.runner;
    if (!ctx || !runner) return null;
    const log = this.ctx.logger.withTags({ deviceId: config.deviceId });
    const api = this.ctx.api;
    if (!api) {
      log.warn("subscribeDetectionFrames: this.ctx.api not available");
      return null;
    }
    // Broker ids are "<deviceId>/<streamId>".
    const detectionBrokerId = `${config.deviceId}/${config.detectionStreamId}`;
    const detectionBroker = await api.streamBroker.getBroker.query({ brokerId: detectionBrokerId });
    if (!detectionBroker) {
      log.warn("subscribeDetectionFrames: no broker found", { meta: { brokerId: detectionBrokerId } });
      return null;
    }
    return detectionBroker.onDecodedFrame(
      (frame) => {
        runner.enqueueDetectionFrame(config.deviceId, frame);
      },
      // `format: 'rgb'` is the Phase 4 hot-path switch: detection now
      // requests raw RGB24 from the broker so the decoder skips the
      // sharp JPEG encode and the Python pool skips PIL JPEG decode.
      // When WebRTC also subscribes (jpeg), the broker derives JPEG
      // once per frame via its conversion cache — no double work.
      { maxFps: config.detectionFps, format: "rgb", tag: "detection" }
    );
  }
  // ── Internal: inference + motion callbacks ───────────────────────────
  /**
   * Runner `processFrame` callback: run the camera's configured step
   * tree over one detection frame via the pipeline executor. Returns
   * the executor result, or null when not runnable (no ctx/api, no
   * steps configured, empty step list) or on executor failure (logged).
   */
  async runInference(deviceId, frame) {
    const ctx = this.ctx;
    if (!ctx) return null;
    const log = this.ctx.logger.withTags({ deviceId });
    const api = this.ctx.api;
    if (!api) {
      log.error("runInference: this.ctx.api not available");
      return null;
    }
    const attachment = this.attached.get(deviceId);
    const camConfig = attachment?.config;
    const steps = camConfig?.steps;
    const engine = camConfig?.engine;
    // `steps` missing entirely means a legacy attach payload — warn;
    // `steps: []` means explicitly nothing to run — skip silently.
    if (!steps) {
      log.warn("runInference: no steps in attach config — skipping frame (legacy attach?)");
      return null;
    }
    if (steps.length === 0) {
      return null;
    }
    try {
      return await api.pipelineExecutor.runPipeline.mutate({
        // tRPC input is a mutable array; the attach payload holds it
        // as readonly. One spread copy at the cap boundary is cheap
        // (pipeline step trees are tiny) and keeps the type surface
        // clean without casting.
        steps: [...steps],
        frame,
        deviceId,
        ...engine ? { engine } : {}
      });
    } catch (err) {
      const msg = errMsg(err);
      log.error("runInference failed", { meta: { error: msg } });
      return null;
    }
  }
  /**
   * Run motion analysis on one decoded frame via the cap API
   * (`motionDetection.analyze`) and publish the results on the bus.
   *
   * Emits, when an event bus is available:
   *  - `MotionOnMotionChanged` only on edge transitions (detected flag
   *    flips relative to the last analyzed frame for this device);
   *  - `MotionAnalysis` (clustered regions) and `MotionZonesRaw`
   *    (unclustered regions, bbox as [x1,y1,x2,y2], intensity scaled
   *    to 0..1) for every successfully analyzed frame.
   *
   * A timing sample is recorded only on the success path; any failure
   * is logged and swallowed so the frame loop keeps running.
   */
  async runMotionAnalysis(deviceId, frame) {
    const ctx = this.ctx;
    const runner = this.runner;
    if (!ctx || !runner) return;
    const log = this.ctx.logger.withTags({ deviceId });
    const motionStart = Date.now();
    try {
      const api = this.ctx.api;
      if (!api) {
        log.warn("runMotionAnalysis: this.ctx.api not available");
        return;
      }
      const result = await api.motionDetection.analyze.mutate({ deviceId, frame: toFrameInput(frame) });
      if (!result) return;
      // "Motion detected" means at least one clustered region survived
      // the analyzer's thresholds.
      const detected = result.regions.length > 0;
      const prevDetected = this.lastAnalyzerDetected.get(deviceId) ?? false;
      if (detected !== prevDetected) {
        this.lastAnalyzerDetected.set(deviceId, detected);
        if (this.ctx.eventBus) {
          this.ctx.eventBus.emit(createEvent(
            EventCategory.MotionOnMotionChanged,
            // EventSource wrapper kept symmetric with the onboard
            // emit (Reolink/ONVIF/etc.) so consumers grouping by
            // addonId / deviceId see consistent provenance. `nodeId`
            // identifies which cluster node ran the analyzer.
            { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId },
            {
              deviceId,
              detected,
              timestamp: frame.timestamp,
              source: "analyzer",
              // Regions only attached on the rising edge; a "motion
              // ended" event carries no region data.
              ...detected ? { regions: result.regions } : {}
            }
          ));
        }
      }
      if (this.ctx.eventBus) {
        const motionPayload = {
          detected,
          regionCount: result.regions.length,
          regions: result.regions.map((r) => ({
            bbox: { x: r.bbox.x, y: r.bbox.y, w: r.bbox.w, h: r.bbox.h },
            pixelCount: r.pixelCount,
            intensity: r.intensity
          })),
          frameWidth: frame.width,
          frameHeight: frame.height,
          analysisMs: result.analysisMs
        };
        const analyzerSource = { type: "device", id: deviceId, addonId: this.ctx.id, deviceId, nodeId: this.nodeId };
        this.ctx.eventBus.emit(createEvent(
          EventCategory.MotionAnalysis,
          analyzerSource,
          motionPayload
        ));
        // Raw (unclustered) regions go out separately: bbox converted
        // from {x,y,w,h} to [x1,y1,x2,y2] corners and intensity
        // normalized from 0..255 to a 0..1 changeScore.
        const zonesPayload = {
          deviceId,
          timestamp: frame.timestamp,
          zones: result.rawRegions.map((r) => ({
            bbox: [r.bbox.x, r.bbox.y, r.bbox.x + r.bbox.w, r.bbox.y + r.bbox.h],
            pixelCount: r.pixelCount,
            changeScore: r.intensity / 255
          })),
          frameSize: { width: frame.width, height: frame.height }
        };
        this.ctx.eventBus.emit(createEvent(
          EventCategory.MotionZonesRaw,
          analyzerSource,
          zonesPayload
        ));
      }
      runner.timingSampler.addMotionSample(deviceId, Date.now() - motionStart);
    } catch (error) {
      const msg = errMsg(error);
      log.error("runMotionAnalysis failed", { meta: { error: msg } });
    }
  }
1498
+ emitInferenceResult(deviceId, _frame, result) {
1499
+ const ctx = this.ctx;
1500
+ if (!ctx?.eventBus) return;
1501
+ const payload = {
1502
+ deviceId,
1503
+ frame: result,
1504
+ nodeId: this.nodeId
1505
+ };
1506
+ this.ctx.eventBus.emit(createEvent(
1507
+ EventCategory.PipelineInferenceResult,
1508
+ { type: "device", id: deviceId, nodeId: this.nodeId },
1509
+ payload
1510
+ ));
1511
+ }
1512
+ /**
1513
+ * Emit periodic metric snapshots: one runner-load event for the
1514
+ * node + one camera-metrics event per attached camera. Subscribed
1515
+ * by admin-ui dashboards (LiveLoadPanel, NodeDetailHeader,
1516
+ * CameraStreamPanel) to drive live overlays without polling.
1517
+ *
1518
+ * Skipped when there are no cameras attached so quiet dev runs
1519
+ * don't emit needless bus traffic. The runner-load event is still
1520
+ * emitted in that case because the dashboards rely on it to see
1521
+ * "agent reachable, idle".
1522
+ */
1523
+ emitMetricsSnapshot() {
1524
+ const ctx = this.ctx;
1525
+ const runner = this.runner;
1526
+ if (!ctx?.eventBus || !runner) return;
1527
+ const timestamp = Date.now();
1528
+ void this.getLocalLoad().then((load) => {
1529
+ if (!ctx.eventBus) return;
1530
+ const json = JSON.stringify(load);
1531
+ const prev = this.lastEmittedRunnerLoad;
1532
+ const heartbeatDue = !prev || timestamp - prev.emittedAt >= METRICS_HEARTBEAT_MS;
1533
+ if (prev && prev.json === json && !heartbeatDue) return;
1534
+ this.lastEmittedRunnerLoad = { json, emittedAt: timestamp };
1535
+ ctx.eventBus.emit(createEvent(
1536
+ EventCategory.PipelineRunnerLoadSnapshot,
1537
+ { type: "node", id: this.nodeId, nodeId: this.nodeId },
1538
+ { nodeId: this.nodeId, load, timestamp }
1539
+ ));
1540
+ }).catch(() => {
1541
+ });
1542
+ if (this.attached.size === 0) return;
1543
+ for (const deviceId of this.attached.keys()) {
1544
+ const metrics = runner.getCameraMetrics(deviceId);
1545
+ if (!metrics) continue;
1546
+ const json = JSON.stringify(metrics);
1547
+ const prev = this.lastEmittedCameraMetrics.get(deviceId);
1548
+ const heartbeatDue = !prev || timestamp - prev.emittedAt >= METRICS_HEARTBEAT_MS;
1549
+ if (prev && prev.json === json && !heartbeatDue) continue;
1550
+ this.lastEmittedCameraMetrics.set(deviceId, { json, emittedAt: timestamp });
1551
+ ctx.eventBus.emit(createEvent(
1552
+ EventCategory.PipelineCameraMetricsSnapshot,
1553
+ { type: "device", id: deviceId, nodeId: this.nodeId },
1554
+ { deviceId, nodeId: this.nodeId, metrics, timestamp }
1555
+ ));
1556
+ }
1557
+ }
1558
+ // ── Standard ICamstackAddon — three-level settings API (Phase 3) ─────
1559
+ //
1560
+ // The runner is a per-node addon with only ADDON-LEVEL settings (no
1561
+ // per-device overrides, no cluster-wide tunables). All four tuning
1562
+ // fields live in `getAddonSettings()`. When the UI surface moves in
1563
+ // Phase 9 these will be rendered under Pipeline -> node -> Settings.
1564
+ globalSettingsSchema() {
1565
+ return this.schema({
1566
+ sections: [
1567
+ {
1568
+ id: "pipeline-runner-tuning",
1569
+ title: "Pipeline Runner",
1570
+ tab: "scheduler",
1571
+ description: "Per-node detection scheduler tuning. Change only if you understand the pipeline internals.",
1572
+ columns: 2,
1573
+ fields: [
1574
+ {
1575
+ type: "slider",
1576
+ key: "maxConcurrentInferences",
1577
+ label: "Scheduler concurrency",
1578
+ description: 'Max parallel inferences the runner scheduler allows across all cameras on this node. Distinct from the detection-pipeline inference-pool worker count (Pipeline tab → "Worker concurrency"), which controls Python-side thread pool sizing inside a single inference job.',
1579
+ min: 1,
1580
+ max: 16,
1581
+ step: 1,
1582
+ default: DEFAULT_CONFIG.maxConcurrentInferences,
1583
+ showValue: true
1584
+ },
1585
+ {
1586
+ type: "slider",
1587
+ key: "maxQueueDepth",
1588
+ label: "Max queue depth",
1589
+ description: "Maximum frames held per camera before dropping.",
1590
+ min: 5,
1591
+ max: 100,
1592
+ step: 5,
1593
+ default: DEFAULT_CONFIG.maxQueueDepth,
1594
+ showValue: true
1595
+ },
1596
+ {
1597
+ type: "slider",
1598
+ key: "targetLoadPercent",
1599
+ label: "Target load",
1600
+ description: "Percentage of inference capacity to target before throttling FPS.",
1601
+ min: 50,
1602
+ max: 100,
1603
+ step: 5,
1604
+ default: DEFAULT_CONFIG.targetLoadPercent,
1605
+ unit: "%",
1606
+ showValue: true
1607
+ },
1608
+ {
1609
+ type: "slider",
1610
+ key: "minThrottledFps",
1611
+ label: "Min throttled FPS",
1612
+ description: "Lowest FPS the runner will allow when load-shedding.",
1613
+ min: 1,
1614
+ max: 10,
1615
+ step: 1,
1616
+ default: DEFAULT_CONFIG.minThrottledFps,
1617
+ showValue: true
1618
+ }
1619
+ ]
1620
+ }
1621
+ ]
1622
+ });
1623
+ }
1624
+ async onConfigChanged() {
1625
+ this.runner?.updateLimits(this.config);
1626
+ this.ctx.logger.info(
1627
+ "pipeline-runner tuning updated",
1628
+ {
1629
+ meta: {
1630
+ maxQueueDepth: this.config.maxQueueDepth,
1631
+ maxConcurrentInferences: this.config.maxConcurrentInferences,
1632
+ targetLoadPercent: this.config.targetLoadPercent,
1633
+ minThrottledFps: this.config.minThrottledFps
1634
+ }
1635
+ }
1636
+ );
1637
+ }
1638
+ }
1639
+ export {
1640
+ FrameQueue,
1641
+ PipelineRunner,
1642
+ PipelineTimingSampler,
1643
+ Semaphore,
1644
+ pipelineRunnerBenchActions as customActions,
1645
+ PipelineRunnerAddon as default
1646
+ };
1647
+ //# sourceMappingURL=index.mjs.map