@camstack/addon-decoder-nodeav 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3GDQP6AS.mjs +16 -0
- package/dist/chunk-3GDQP6AS.mjs.map +1 -0
- package/dist/index.d.mts +293 -0
- package/dist/index.d.ts +293 -0
- package/dist/index.js +7536 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +920 -0
- package/dist/index.mjs.map +1 -0
- package/dist/lib-PHLUZNNX.mjs +6588 -0
- package/dist/lib-PHLUZNNX.mjs.map +1 -0
- package/package.json +71 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,920 @@
|
|
|
1
|
+
import "./chunk-3GDQP6AS.mjs";
|
|
2
|
+
|
|
3
|
+
// src/addon/index.ts
|
|
4
|
+
import { randomUUID } from "crypto";
|
|
5
|
+
import {
|
|
6
|
+
BaseAddon,
|
|
7
|
+
DEFAULT_DECODER_HWACCEL_CONFIG,
|
|
8
|
+
HWACCEL_OPTIONS,
|
|
9
|
+
decoderCapability,
|
|
10
|
+
RingBuffer
|
|
11
|
+
} from "@camstack/types";
|
|
12
|
+
|
|
13
|
+
// src/nodeav-decoder-session.ts
|
|
14
|
+
import { errMsg } from "@camstack/types";
|
|
15
|
+
/**
 * Translate a hwaccel backend name into node-av's AV_HWDEVICE_TYPE_*
 * constant from the supplied constants namespace.
 *
 * @param {string} backend - backend identifier (e.g. "cuda", "vaapi").
 * @param {object} consts - the `node-av/constants` namespace object.
 * @returns the matching AV_HWDEVICE_TYPE_* value, or `null` for an
 *   unrecognised backend name.
 */
function backendToHwDeviceConst(backend, consts) {
  // node-av exposes only CUDA; nvdec aliases to it.
  const lookup = {
    videotoolbox: consts.AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
    cuda: consts.AV_HWDEVICE_TYPE_CUDA,
    nvdec: consts.AV_HWDEVICE_TYPE_CUDA,
    vaapi: consts.AV_HWDEVICE_TYPE_VAAPI,
    qsv: consts.AV_HWDEVICE_TYPE_QSV,
    d3d11va: consts.AV_HWDEVICE_TYPE_D3D11VA,
    dxva2: consts.AV_HWDEVICE_TYPE_DXVA2,
    amf: consts.AV_HWDEVICE_TYPE_AMF,
    vdpau: consts.AV_HWDEVICE_TYPE_VDPAU,
    drm: consts.AV_HWDEVICE_TYPE_DRM
  };
  return Object.hasOwn(lookup, backend) ? lookup[backend] : null;
}
|
|
42
|
+
/**
 * Translate a hwaccel backend name into the hardware pixel format
 * (AV_PIX_FMT_*) the decoder emits for that backend.
 *
 * @param {string} backend - backend identifier (e.g. "cuda", "vaapi").
 * @param {object} consts - the `node-av/constants` namespace object.
 * @returns the matching AV_PIX_FMT_* value, or `null` for an
 *   unrecognised backend name.
 */
function backendToHwPixFmt(backend, consts) {
  // nvdec aliases to CUDA; AMF surfaces arrive as D3D11 textures.
  const lookup = {
    videotoolbox: consts.AV_PIX_FMT_VIDEOTOOLBOX,
    cuda: consts.AV_PIX_FMT_CUDA,
    nvdec: consts.AV_PIX_FMT_CUDA,
    vaapi: consts.AV_PIX_FMT_VAAPI,
    qsv: consts.AV_PIX_FMT_QSV,
    d3d11va: consts.AV_PIX_FMT_D3D11,
    dxva2: consts.AV_PIX_FMT_DXVA2_VLD,
    amf: consts.AV_PIX_FMT_D3D11,
    vdpau: consts.AV_PIX_FMT_VDPAU,
    drm: consts.AV_PIX_FMT_DRM_PRIME
  };
  return Object.hasOwn(lookup, backend) ? lookup[backend] : null;
}
|
|
68
|
+
// Lazily-loaded module singletons. node-av (native bindings), its
// constants namespace, and sharp are pulled in on first use only, so
// merely importing this addon stays cheap on hosts that never start a
// decoder session.
var _nav = null;
var _consts = null;
var _sharp = null;

/** Load (once) and return the `node-av` native module. */
async function getNodeAv() {
  _nav ??= await import("node-av");
  return _nav;
}

/** Load (once) and return the `node-av/constants` namespace. */
async function getConstants() {
  _consts ??= await import("node-av/constants");
  return _consts;
}

/**
 * Load (once) and return the bundled sharp entry point (default export
 * of the chunked lib module).
 */
async function getSharp() {
  if (_sharp === null) {
    const mod = await import("./lib-PHLUZNNX.mjs");
    _sharp = mod.default;
  }
  return _sharp;
}
|
|
86
|
+
// Logger that satisfies the session logger interface while discarding
// everything. Used as the default when the caller supplies no logger.
// `child()` / `withTags()` hand back the same sink so derived loggers
// are no-ops too.
var noopLogger = (() => {
  const ignore = () => {
  };
  const sink = {
    debug: ignore,
    info: ignore,
    warn: ignore,
    error: ignore,
    child: () => sink,
    withTags: () => sink
  };
  return sink;
})();
|
|
102
|
+
/**
 * Push-mode H.264/HEVC decoder session backed by node-av (libav
 * bindings). Callers push raw Annex-B packets via `pushPacket`; the
 * session parses, decodes (optionally via a hardware backend), scales
 * to RGB24/GRAY8, optionally JPEG-encodes with sharp, and fans decoded
 * frames out to `onFrame` subscribers. Heavy native setup is deferred
 * to the first keyframe (`initDecoder`) / first decoded frame
 * (`initScaler`); after that the hot path is synchronous except the
 * JPEG encode.
 */
var NodeAvDecoderSession = class _NodeAvDecoderSession {
  config;
  logger;
  // Subscribers notified with each emitted DecodedFrame.
  frameCallbacks = /* @__PURE__ */ new Set();
  destroyed = false;
  // Low-level node-av objects (initialized on first keyframe)
  parser = null;
  codecCtx = null;
  scaler = null;
  avPacket = null;
  avFrame = null;
  dstFrame = null;
  // Cached constants (loaded during init, used in hot path)
  // Hard-coded numeric defaults are replaced in initDecoder() with the real
  // branded constants from `node-av/constants`. The `as` cast at the literal
  // is a one-time branding bridge — once initDecoder runs we hold the exact
  // AVPixelFormat/SwsFlags/AVError values.
  EAGAIN = -11;
  PIX_FMT_GRAY8 = 8;
  PIX_FMT_RGB24 = 2;
  SWS_FAST_BILINEAR = 1;
  /**
   * Decoder output mode. Drives both the scaler's destination pixel
   * format and whether sharp runs the JPEG encode at the end:
   *
   * - `'jpeg'` — scaler→RGB24 → sharp encode → emit JPEG bytes
   * - `'rgb'` — scaler→RGB24 → emit raw RGB24 (no sharp)
   * - `'gray'` — scaler→GRAY8 → emit raw GRAY8 (no sharp)
   *
   * The broker holds the policy decision on which mode to request based
   * on its active subscribers; on-the-fly conversion (e.g. RGB→JPEG for
   * a WebRTC consumer that joined while detection holds the decoder in
   * RGB mode) happens broker-side via the per-frame conversion cache.
   */
  outputMode;
  // sharp factory fn — loaded lazily, only needed for 'jpeg' mode.
  sharpFn = null;
  /**
   * Backpressure for the sharp JPEG encode pipeline. The broker
   * currently creates sessions with `maxFps: 0` (unlimited) and relies
   * on per-subscriber throttling, so without a bound the
   * fire-and-forget `sharp(...).toBuffer()` chain would accumulate
   * unboundedly whenever sharp falls behind the decoder. Cap at
   * `MAX_JPEG_INFLIGHT` pending encodes per session — any frame that
   * arrives while the cap is saturated is dropped and counted.
   */
  static MAX_JPEG_INFLIGHT = 2;
  jpegEncodeInFlight = 0;
  /**
   * Map a `DecoderSessionConfig.outputFormat` value to one of the three
   * native scaler/encoder modes the session understands. The cap-level
   * format vocabulary is broader (it accepts `bgr`, `yuv420`) than what
   * libav's scaler is wired for here — anything else degrades to RGB
   * (the canonical raw mode) and the broker is expected to convert
   * downstream if a subscriber needs a different shape.
   */
  static resolveOutputMode(format) {
    if (format === "jpeg" || format === void 0) return "jpeg";
    if (format === "gray") return "gray";
    return "rgb";
  }
  initialized = false;
  initializing = false;
  scalerInitializing = false;
  /**
   * Monotonic counter incremented by `updateConfig` whenever the
   * scaler + dstFrame get invalidated (e.g. output format toggle).
   * `initScaler` captures the current value at entry and aborts — or
   * disposes the locally-built scaler — if the epoch moved while
   * its async init was in flight. Without this, a toggle racing an
   * in-flight init could leave two scalers allocated natively while
   * `this.scaler` only holds a reference to one → libav leak.
   */
  scalerEpoch = 0;
  /**
   * One-shot guard for the "first frame" diagnostic log + raw frame
   * dump. Setting this synchronously inside `emitDecodedFrame`
   * prevents re-entry — without it we were using `outputFrames === 0`
   * which stays true until the async sharp encode callback runs, so
   * several decoded frames could trigger the dump before the first
   * JPEG landed.
   */
  firstFrameLogged = false;
  // Output dimensions
  outWidth = 0;
  outHeight = 0;
  // FPS limiter
  lastEmitTime = 0;
  minIntervalMs;
  // Stats
  inputPackets = 0;
  outputFrames = 0;
  droppedFrames = 0;
  totalDecodeTimeMs = 0;
  startTime = Date.now();
  hwaccelPref;
  hwaccelResolver;
  /** The backend that actually initialised successfully — `'none'` = software fallback. */
  activeHwAccel = "none";
  hwDevice = null;
  swTransferFrame = null;
  /**
   * @param config  session config (codec, maxFps, outputFormat, scale,
   *                optional width/height hint, deviceId/tag for log tags)
   * @param logger  structured logger; defaults to the module noop sink
   * @param options optional `{ hwaccel, hwaccelResolver }` overrides
   */
  constructor(config, logger = noopLogger, options) {
    this.config = { ...config };
    const sessionTags = {};
    if (typeof config.deviceId === "number") sessionTags["deviceId"] = config.deviceId;
    if (typeof config.tag === "string" && config.tag.length > 0) sessionTags["tag"] = config.tag;
    this.logger = Object.keys(sessionTags).length > 0 ? logger.withTags(sessionTags) : logger;
    // maxFps <= 0 means "unlimited" → no minimum inter-emit interval.
    this.minIntervalMs = config.maxFps > 0 ? 1e3 / config.maxFps : 0;
    this.outputMode = _NodeAvDecoderSession.resolveOutputMode(config.outputFormat);
    this.hwaccelPref = options?.hwaccel ?? "auto";
    this.hwaccelResolver = options?.hwaccelResolver ?? null;
  }
  /**
   * Resolve the backend preference list and try each one against
   * node-av's HW context APIs. The first backend whose
   * `HardwareDeviceContext.create()` succeeds gets attached to
   * `codecCtx.hwDeviceCtx` + its hw pixel format registered via
   * `setHardwarePixelFormat`. On any failure, falls through to the
   * next backend; if all fail, returns with `activeHwAccel='none'`
   * and the decoder runs in software on the same context.
   */
  async tryAttachHwAccel(nav, C) {
    if (!this.codecCtx) return;
    if (this.hwaccelPref === "none") {
      this.activeHwAccel = "none";
      return;
    }
    const explicit = this.hwaccelPref === "auto" ? null : this.hwaccelPref;
    const resolution = this.hwaccelResolver ? await this.hwaccelResolver.resolve(explicit) : explicit ? { preferred: [explicit], rationale: "explicit (no resolver)" } : { preferred: [], rationale: "auto + no resolver \u2192 software" };
    if (resolution.preferred.length === 0) {
      this.activeHwAccel = "none";
      return;
    }
    for (const backend of resolution.preferred) {
      const deviceType = backendToHwDeviceConst(backend, C);
      const hwPixFmt = backendToHwPixFmt(backend, C);
      if (!deviceType || !hwPixFmt) continue;
      const device = new nav.HardwareDeviceContext();
      const rc = device.create(deviceType);
      if (rc < 0) {
        this.logger.warn("node-av: hwaccel device create failed \u2014 trying next", {
          meta: { backend, rc }
        });
        device.free();
        continue;
      }
      try {
        this.codecCtx.hwDeviceCtx = device;
        this.codecCtx.setHardwarePixelFormat(hwPixFmt);
      } catch (err) {
        this.logger.warn("node-av: hwaccel context attach failed \u2014 trying next", {
          meta: { backend, error: errMsg(err) }
        });
        device.free();
        continue;
      }
      this.hwDevice = device;
      this.activeHwAccel = backend;
      return;
    }
    this.logger.warn("node-av: no hwaccel backend initialised \u2014 using software", {
      meta: { attempted: resolution.preferred.join(","), rationale: resolution.rationale }
    });
    this.activeHwAccel = "none";
  }
  /**
   * Download a HW frame (format == hw pix fmt) into a SW frame so the
   * rest of the pipeline (scaler, JPEG encoder, grayscale passthrough)
   * handles it identically to the pure-software path. Uses the sync
   * variant so the synchronous receive loop below doesn't need to be
   * async-ified. Returns `null` on transfer failure, meaning the
   * caller should drop the frame.
   */
  transferHwFrame(hwFrame) {
    if (this.activeHwAccel === "none" || !this.swTransferFrame) return hwFrame;
    const rc = hwFrame.hwframeTransferDataSync(this.swTransferFrame, 0);
    if (rc < 0) {
      this.logger.warn("node-av: hwframeTransferData failed", { meta: { rc } });
      return null;
    }
    return this.swTransferFrame;
  }
  /**
   * Initialize the decoder pipeline on the first keyframe.
   * After this returns, all hot-path methods are fully synchronous (except JPEG encode).
   */
  async initDecoder() {
    if (this.initialized || this.initializing || this.destroyed) return;
    this.initializing = true;
    try {
      const nav = await getNodeAv();
      const C = await getConstants();
      // Swap the numeric placeholder fields for the real branded constants.
      this.EAGAIN = C.AVERROR_EAGAIN;
      this.PIX_FMT_GRAY8 = C.AV_PIX_FMT_GRAY8;
      this.PIX_FMT_RGB24 = C.AV_PIX_FMT_RGB24;
      this.SWS_FAST_BILINEAR = C.SWS_FAST_BILINEAR;
      if (this.outputMode === "jpeg") {
        this.sharpFn = await getSharp();
      }
      // Silence libav's own stderr chatter; only fatal messages pass.
      nav.Log.setLevel(C.AV_LOG_FATAL);
      const isHevc = this.config.codec === "h265" || this.config.codec === "hevc";
      const codecId = isHevc ? C.AV_CODEC_ID_HEVC : C.AV_CODEC_ID_H264;
      this.parser = new nav.CodecParser();
      this.parser.init(codecId);
      const codec = nav.Codec.findDecoder(codecId);
      if (!codec) {
        this.logger.error("node-av: no decoder found", { meta: { codec: this.config.codec } });
        return;
      }
      this.codecCtx = new nav.CodecContext();
      this.codecCtx.allocContext3(codec);
      if (this.config.width && this.config.height) {
        this.codecCtx.width = this.config.width;
        this.codecCtx.height = this.config.height;
      }
      this.codecCtx.threadCount = 1;
      await this.tryAttachHwAccel(nav, C);
      const ret = await this.codecCtx.open2(codec);
      if (ret < 0) {
        if (this.activeHwAccel !== "none") {
          // HW open failed → rebuild a fresh context and retry in software.
          this.logger.warn("node-av: open2 failed with hwaccel \u2014 retrying in software", {
            meta: { ret, hwAccel: this.activeHwAccel }
          });
          this.hwDevice?.free();
          this.hwDevice = null;
          this.activeHwAccel = "none";
          this.codecCtx = new nav.CodecContext();
          this.codecCtx.allocContext3(codec);
          if (this.config.width && this.config.height) {
            this.codecCtx.width = this.config.width;
            this.codecCtx.height = this.config.height;
          }
          this.codecCtx.threadCount = 1;
          const retry = await this.codecCtx.open2(codec);
          if (retry < 0) {
            this.logger.error("node-av: failed to open decoder (sw fallback)", { meta: { ret: retry } });
            return;
          }
        } else {
          this.logger.error("node-av: failed to open decoder", { meta: { ret } });
          return;
        }
      }
      if (this.activeHwAccel !== "none") {
        // Staging frame for HW→SW pixel transfers (see transferHwFrame).
        this.swTransferFrame = new nav.Frame();
        this.swTransferFrame.alloc();
      }
      this.avPacket = new nav.Packet();
      this.avPacket.alloc();
      this.avFrame = new nav.Frame();
      this.avFrame.alloc();
      this.initialized = true;
      this.logger.info("node-av push decoder initialized", {
        meta: {
          codec: this.config.codec,
          output: this.outputMode,
          // Reports the backend that actually succeeded at
          // `open2(codec)` with `hwDeviceCtx` attached, or `'none'` if
          // we fell back to software (explicit `hwaccel: 'none'`
          // override, empty resolver output, or every attempted
          // backend failed init).
          hwAccel: this.activeHwAccel
        }
      });
    } catch (err) {
      this.logger.error("node-av init error", { meta: { error: errMsg(err) } });
    } finally {
      this.initializing = false;
    }
  }
  /**
   * Initialize the scaler after the first frame tells us the actual
   * dimensions. Output pixel format: RGB24 for JPEG encoding, GRAY8
   * for raw motion.
   *
   * Builds `scaler` + `dstFrame` on local variables and publishes
   * them onto `this` in a single atomic step at the end. Captures
   * `scalerEpoch` at entry; if `updateConfig` invalidated the scaler
   * while this init was in flight (epoch mismatch), the locally
   * built pair is disposed and discarded so the later init wins.
   * Without the local-first approach, partial state (scaler set,
   * dstFrame still null) could be observed by a concurrent
   * `emitDecodedFrame` call.
   */
  async initScaler(srcW, srcH, srcFmt) {
    if (this.scalerInitializing) return;
    this.scalerInitializing = true;
    const myEpoch = this.scalerEpoch;
    let localScaler = null;
    let localDstFrame = null;
    try {
      const nav = await getNodeAv();
      if (this.destroyed || myEpoch !== this.scalerEpoch) return;
      // Downscale so output width never exceeds 640/scale, preserving
      // aspect ratio; never upscale beyond the source width.
      const scale = this.config.scale > 1 ? this.config.scale : 1;
      const maxW = Math.floor(640 / scale);
      const outWidth = Math.min(srcW, maxW);
      const outHeight = Math.round(outWidth * srcH / srcW);
      const dstFmt = this.outputMode === "gray" ? this.PIX_FMT_GRAY8 : this.PIX_FMT_RGB24;
      const fmtName = this.outputMode === "gray" ? "gray8" : "rgb24";
      localScaler = new nav.SoftwareScaleContext();
      localScaler.getContext(
        srcW,
        srcH,
        srcFmt,
        outWidth,
        outHeight,
        dstFmt,
        this.SWS_FAST_BILINEAR
      );
      const ret = localScaler.initContext();
      if (ret < 0) {
        this.logger.error("node-av: sws_init_context failed", { meta: { ret } });
        return;
      }
      localDstFrame = new nav.Frame();
      localDstFrame.alloc();
      localDstFrame.width = outWidth;
      localDstFrame.height = outHeight;
      localDstFrame.format = dstFmt;
      const allocRet = localDstFrame.allocBuffer();
      if (allocRet < 0) {
        this.logger.error("node-av: dst frame allocBuffer failed", { meta: { ret: allocRet } });
        return;
      }
      // Re-check the epoch just before publishing: a concurrent
      // updateConfig may have invalidated this build.
      if (this.destroyed || myEpoch !== this.scalerEpoch) return;
      this.scaler?.[Symbol.dispose]?.();
      this.dstFrame?.[Symbol.dispose]?.();
      this.scaler = localScaler;
      this.dstFrame = localDstFrame;
      this.outWidth = outWidth;
      this.outHeight = outHeight;
      // Null the locals so the finally-block cleanup skips them.
      localScaler = null;
      localDstFrame = null;
      this.logger.info("node-av scaler initialized", {
        meta: { srcWidth: srcW, srcHeight: srcH, outWidth, outHeight, format: fmtName }
      });
    } catch (err) {
      this.logger.error("Scaler init failed", { meta: { error: errMsg(err) } });
    } finally {
      // Dispose anything built but never published (early return / error).
      localScaler?.[Symbol.dispose]?.();
      localDstFrame?.[Symbol.dispose]?.();
      this.scalerInitializing = false;
    }
  }
  /**
   * Entry point for compressed packets. Defers native init to the
   * first keyframe (decoding mid-GOP without one would only produce
   * artifacts); packets arriving before init completes are dropped.
   */
  pushPacket(packet) {
    if (this.destroyed) return;
    this.inputPackets++;
    if (!this.initialized && !this.initializing && packet.keyframe) {
      this.initDecoder().then(() => {
        // Feed the triggering keyframe once init succeeds.
        if (this.initialized) this.decodeRawData(packet.data, packet.pts ?? Date.now());
      }).catch((err) => {
        this.logger.error("node-av init failed", { meta: { error: errMsg(err) } });
      });
      return;
    }
    if (!this.initialized) return;
    this.decodeRawData(packet.data, packet.pts ?? Date.now());
  }
  /**
   * Run the bitstream parser over a raw buffer, decoding each complete
   * packet it produces. A negative parser return aborts the remainder
   * of this buffer.
   */
  decodeRawData(data, pts) {
    if (!this.parser || !this.codecCtx || !this.avPacket || !this.avFrame) return;
    const buf = Buffer.isBuffer(data) ? data : Buffer.from(data);
    const bigPts = BigInt(pts);
    let offset = 0;
    while (offset < buf.length) {
      const remaining = buf.subarray(offset);
      const consumed = this.parser.parse2(
        this.codecCtx,
        this.avPacket,
        remaining,
        bigPts,
        bigPts,
        offset
      );
      if (consumed < 0) {
        this.logger.warn("node-av parser error", { meta: { ret: consumed } });
        break;
      }
      offset += consumed;
      if (this.avPacket.size > 0) {
        this.decodePacket();
        this.avPacket.unref();
      }
    }
  }
  /**
   * send/receive one packet through the codec: EAGAIN on send is
   * tolerated (decoder wants draining first); the receive loop drains
   * every frame the decoder has ready, downloading HW frames to SW as
   * needed.
   */
  decodePacket() {
    if (!this.codecCtx || !this.avFrame) return;
    const sendRet = this.codecCtx.sendPacketSync(this.avPacket);
    if (sendRet < 0 && sendRet !== this.EAGAIN) {
      this.logger.warn("node-av sendPacket error", { meta: { ret: sendRet } });
      return;
    }
    while (true) {
      const recvRet = this.codecCtx.receiveFrameSync(this.avFrame);
      if (recvRet < 0) break;
      const frame = this.transferHwFrame(this.avFrame);
      if (!frame) continue;
      this.emitDecodedFrame(frame);
    }
  }
  /**
   * Per-frame hot path: FPS gate → (lazy) scaler init → scale →
   * one-shot diagnostics → dispatch by output mode. Frames arriving
   * while the scaler is still initializing are silently skipped
   * (NOTE(review): these are not added to droppedFrames — confirm
   * whether that's intended).
   */
  emitDecodedFrame(frame) {
    const now = performance.now();
    if (this.minIntervalMs > 0 && now - this.lastEmitTime < this.minIntervalMs) {
      this.droppedFrames++;
      return;
    }
    if (!this.scaler && !this.scalerInitializing) {
      // First decoded frame supplies real dimensions; kick off async
      // scaler init and drop this frame.
      this.initScaler(frame.width, frame.height, frame.format);
      return;
    }
    if (!this.dstFrame || !this.scaler) return;
    const decodeStart = performance.now();
    try {
      this.dstFrame.makeWritable();
      this.scaler.scaleFrameSync(this.dstFrame, frame);
    } catch (err) {
      this.logger.warn("node-av scale error", { meta: { error: errMsg(err) } });
      return;
    }
    if (!this.firstFrameLogged) {
      this.firstFrameLogged = true;
      const channels = this.outputMode === "gray" ? 1 : 3;
      const ls = this.dstFrame.linesize;
      const buf = this.dstFrame.toBuffer();
      this.logger.info("dstFrame after scale", {
        meta: {
          phase: "frame-debug",
          width: this.dstFrame.width,
          height: this.dstFrame.height,
          linesize: [ls[0], ls[1], ls[2]],
          expectedStride: this.dstFrame.width * channels,
          bufLen: buf.length,
          expectedPacked: this.dstFrame.width * channels * this.dstFrame.height,
          srcFormat: frame.format ?? "?"
        }
      });
      if (this.outputMode !== "gray") {
        // Best-effort debug dump of the first RGB24 frame; failures ignored.
        import("fs").then((fsModule) => {
          import("path").then((pathModule) => {
            const dumpPath = pathModule.join(process.cwd(), "camstack-data", "debug-frame-rgb24.raw");
            fsModule.writeFileSync(dumpPath, buf);
            this.logger.info("Dumped first RGB24 frame", { meta: { phase: "frame-debug", path: dumpPath, bytes: buf.length } });
          }).catch(() => {
          });
        }).catch(() => {
        });
      }
    }
    const rawBuf = this.extractPackedBuffer(this.dstFrame);
    if (this.outputMode === "jpeg") {
      this.encodeAndEmitJpeg(rawBuf, decodeStart);
    } else if (this.outputMode === "rgb") {
      this.emitRawFrame(rawBuf, "rgb", decodeStart);
    } else {
      this.emitRawFrame(rawBuf, "gray", decodeStart);
    }
  }
  /**
   * Extract packed pixel buffer from a decoded frame.
   * FFmpeg's av_frame_get_buffer() may pad each row to alignment (32/64 bytes).
   * Sharp and WASM consumers expect tightly-packed rows (stride = width * channels).
   * If linesize matches expected stride, return the buffer directly (zero-copy).
   */
  extractPackedBuffer(frame) {
    const channels = this.outputMode === "gray" ? 1 : 3;
    const expectedStride = frame.width * channels;
    const actualStride = frame.linesize[0] ?? expectedStride;
    const src = frame.data?.[0];
    if (!src) {
      // No plane view available — fall back to node-av's own copy.
      return frame.toBuffer();
    }
    if (actualStride === expectedStride) {
      // Zero-copy: view over the native plane memory.
      return Buffer.from(src.buffer, src.byteOffset, expectedStride * frame.height);
    }
    // Padded rows: repack row-by-row into a tight buffer.
    const dst = Buffer.allocUnsafe(expectedStride * frame.height);
    for (let y = 0; y < frame.height; y++) {
      src.copy(dst, y * expectedStride, y * actualStride, y * actualStride + expectedStride);
    }
    return dst;
  }
  /**
   * Encode RGB24 raw buffer as JPEG and emit.
   *
   * Drops the frame (and counts it) when `MAX_JPEG_INFLIGHT` encodes
   * are already pending — prevents unbounded growth of the
   * fire-and-forget promise chain when sharp cannot keep up with the
   * decode rate.
   */
  encodeAndEmitJpeg(rgb, decodeStart) {
    if (!this.sharpFn) return;
    if (this.jpegEncodeInFlight >= _NodeAvDecoderSession.MAX_JPEG_INFLIGHT) {
      this.droppedFrames++;
      return;
    }
    this.jpegEncodeInFlight++;
    this.sharpFn(rgb, {
      raw: { width: this.outWidth, height: this.outHeight, channels: 3 }
    }).jpeg({ quality: 80, mozjpeg: false }).toBuffer().then((jpegBuf) => {
      if (this.destroyed) return;
      this.emitRawFrame(jpegBuf, "jpeg", decodeStart);
    }).catch((err) => {
      this.logger.warn("sharp jpeg encode error", { meta: { error: errMsg(err) } });
    }).finally(() => {
      this.jpegEncodeInFlight--;
    });
  }
  /**
   * Final emit step for every output mode: update stats/limiter state,
   * log the first and every 500th frame, then deliver the DecodedFrame
   * to all registered callbacks synchronously.
   */
  emitRawFrame(data, format, decodeStart) {
    const decodeMs = performance.now() - decodeStart;
    this.totalDecodeTimeMs += decodeMs;
    this.outputFrames++;
    this.lastEmitTime = performance.now();
    if (this.outputFrames === 1 || this.outputFrames % 500 === 0) {
      this.logger.info("node-av frame emitted", {
        meta: {
          frameNumber: this.outputFrames,
          width: this.outWidth,
          height: this.outHeight,
          format,
          decodeMs,
          subs: this.frameCallbacks.size
        }
      });
    }
    const decodedFrame = {
      data,
      width: this.outWidth,
      height: this.outHeight,
      format,
      timestamp: Date.now()
    };
    for (const cb of this.frameCallbacks) {
      cb(decodedFrame);
    }
  }
  /**
   * Subscribe to decoded frames. Returns an unsubscribe function.
   */
  onFrame(callback) {
    this.frameCallbacks.add(callback);
    return () => {
      this.frameCallbacks.delete(callback);
    };
  }
  /**
   * Apply a partial config update. maxFps changes retune the limiter;
   * an outputFormat change that maps to a different mode bumps the
   * scaler epoch and disposes scaler + dstFrame so the next decoded
   * frame rebuilds them (and lazily loads sharp when switching to
   * jpeg).
   */
  updateConfig(update) {
    const prevFormat = this.config.outputFormat;
    this.config = { ...this.config, ...update };
    if (update.maxFps !== void 0) {
      this.minIntervalMs = update.maxFps > 0 ? 1e3 / update.maxFps : 0;
    }
    if (update.outputFormat !== void 0 && update.outputFormat !== prevFormat) {
      const prevMode = this.outputMode;
      this.outputMode = _NodeAvDecoderSession.resolveOutputMode(update.outputFormat);
      // Different format strings can resolve to the same mode (e.g.
      // bgr→rgb); nothing to rebuild in that case.
      if (this.outputMode === prevMode) return;
      this.scalerEpoch++;
      if (this.scaler) {
        this.scaler[Symbol.dispose]?.();
        this.scaler = null;
      }
      if (this.dstFrame) {
        this.dstFrame[Symbol.dispose]?.();
        this.dstFrame = null;
      }
      if (this.outputMode === "jpeg" && !this.sharpFn) {
        getSharp().then((fn) => {
          this.sharpFn = fn;
        }).catch(() => {
        });
      }
      this.logger.info("node-av: output format changed \u2014 scaler will reinit", {
        meta: { from: prevFormat, to: update.outputFormat, mode: this.outputMode }
      });
    }
  }
  /**
   * Tear down all native resources. Idempotent. Dispose order mirrors
   * allocation dependencies: frames/packets/scaler/parser first, then
   * codec context, then the HW device it referenced.
   */
  async destroy() {
    if (this.destroyed) return;
    this.destroyed = true;
    this.frameCallbacks.clear();
    this.dstFrame?.[Symbol.dispose]?.();
    this.avFrame?.[Symbol.dispose]?.();
    this.avPacket?.[Symbol.dispose]?.();
    this.scaler?.[Symbol.dispose]?.();
    this.parser?.[Symbol.dispose]?.();
    this.swTransferFrame?.[Symbol.dispose]?.();
    this.codecCtx?.[Symbol.dispose]?.();
    this.hwDevice?.free();
    this.dstFrame = null;
    this.avFrame = null;
    this.avPacket = null;
    this.scaler = null;
    this.parser = null;
    this.codecCtx = null;
    this.swTransferFrame = null;
    this.hwDevice = null;
  }
  /**
   * Lifetime-averaged throughput stats. Uptime is clamped to >= 1s so
   * early calls don't divide by ~0.
   */
  getStats() {
    const uptimeSec = Math.max((Date.now() - this.startTime) / 1e3, 1);
    return {
      inputFps: this.inputPackets / uptimeSec,
      outputFps: this.outputFrames / uptimeSec,
      avgDecodeTimeMs: this.outputFrames > 0 ? this.totalDecodeTimeMs / this.outputFrames : 0,
      droppedFrames: this.droppedFrames
    };
  }
  /** This session is push-only; callers feed it via pushPacket. */
  get isPullMode() {
    return false;
  }
};
|
|
704
|
+
|
|
705
|
+
// src/addon/index.ts
// Presumably the capacity of each per-session decoded-frame RingBuffer
// (the consuming code is outside this chunk) — TODO confirm against
// DecoderNodeAvAddon's frameBuffers usage.
var FRAME_BUFFER_CAPACITY = 32;
|
|
707
|
+
var DecoderNodeAvAddon = class extends BaseAddon {
  /** sessionId -> live NodeAvDecoderSession. */
  sessions = /* @__PURE__ */ new Map();
  /** sessionId -> RingBuffer of decoded frames awaiting pullFrames(). */
  frameBuffers = /* @__PURE__ */ new Map();
  /** sessionId -> unsubscribe fn for that session's onFrame listener. */
  unsubscribers = /* @__PURE__ */ new Map();
  /** sessionId -> { codec, outputFormat, createdAtMs } for listActiveSessions(). */
  sessionMeta = /* @__PURE__ */ new Map();
  constructor() {
    super(DEFAULT_DECODER_HWACCEL_CONFIG);
  }
  /**
   * Look up a live session or throw the addon's uniform unknown-id error.
   * Shared by every per-session RPC so the error text stays consistent.
   * @param {string} sessionId
   * @returns the session
   * @throws {Error} when no session is registered under sessionId
   */
  #requireSession(sessionId) {
    const session = this.sessions.get(sessionId);
    if (!session) {
      throw new Error(`decoder-nodeav: unknown sessionId ${sessionId}`);
    }
    return session;
  }
  /**
   * Settings schema: one "Hardware acceleration" section with the operator's
   * preferred backend (applies to NEW sessions only) and a read-only,
   * re-probeable "Probed best" hint field.
   */
  globalSettingsSchema() {
    return this.schema({
      sections: [{
        id: "hwaccel",
        title: "Hardware acceleration",
        tab: "decoder",
        description: 'Backend used by node-av decoder sessions. "Auto" defers to the probed best; concrete backends force it. Changes apply to NEW sessions \u2014 existing sessions keep the backend they were created with.',
        fields: [
          this.field({
            type: "select",
            key: "hwaccel",
            label: "Preferred backend",
            options: [...HWACCEL_OPTIONS],
            default: "auto",
            immediate: true
          }),
          this.field({
            type: "text",
            key: "probedBestHwaccel",
            label: "Probed best",
            description: "Auto-detected best decoder backend on this host. Click the refresh icon to re-run the probe.",
            readonlyField: true,
            default: "",
            actions: [
              { action: "reprobe-hwaccel", icon: "refresh-cw", tooltip: "Re-probe hwaccel" }
            ]
          })
        ]
      }]
    });
  }
  /**
   * Register the decoder capability. Kicks off a background hwaccel probe the
   * first time (when no probed value is stored yet); probe failure is logged
   * but never blocks initialization.
   */
  async onInitialize() {
    this.ctx.logger.info("node-av decoder addon initialized");
    if (!this.config.probedBestHwaccel) {
      // Fire-and-forget: the probe only populates the read-only hint field.
      this.reprobeHwaccel().catch((err) => {
        this.ctx.logger.warn("nodeav: auto-reprobe hwaccel failed", {
          meta: { error: err instanceof Error ? err.message : String(err) }
        });
      });
    }
    return [{ capability: decoderCapability, provider: this }];
  }
  /**
   * Resolve the effective hwaccel backend for a new session. Reads
   * this addon's own `hwaccel` setting first. `'auto'` defers to the
   * session's local resolver (`ctx.kernel.hwaccel`) which probes the
   * host and picks. No more orchestrator round-trip — decoder addon
   * is self-sufficient for this setting as of phase 2d.
   */
  resolveHwAccelPref() {
    return this.config.hwaccel;
  }
  /**
   * Re-run the platform probe on this host and persist the detected
   * backend as `probedBestHwaccel`. The operator's `hwaccel` setting
   * is intentionally left alone — the probe only updates the hint.
   * @returns {{ backend: string }} the probed backend, or "none" on
   *   missing resolver / probe failure.
   */
  async reprobeHwaccel() {
    const resolver = this.ctx.kernel.hwaccel;
    if (!resolver) {
      this.ctx.logger.warn("reprobeHwaccel: no kernel hwaccel resolver \u2014 returning none");
      await this.ctx.settings?.writeAddonStore({ probedBestHwaccel: "none" });
      return { backend: "none" };
    }
    try {
      const res = await resolver.resolve();
      const backend = res.preferred[0] ?? "none";
      await this.ctx.settings?.writeAddonStore({ probedBestHwaccel: backend });
      this.ctx.logger.info("reprobeHwaccel: wrote probedBestHwaccel", {
        meta: { backend, rationale: res.rationale, preferred: res.preferred }
      });
      return { backend };
    } catch (err) {
      this.ctx.logger.warn("reprobeHwaccel failed", {
        meta: { error: err instanceof Error ? err.message : String(err) }
      });
      await this.ctx.settings?.writeAddonStore({ probedBestHwaccel: "none" });
      return { backend: "none" };
    }
  }
  /** H.264 and H.265/HEVC only (case-insensitive codec name match). */
  async supportsCodec(input) {
    return ["h264", "h265", "hevc"].includes(input.codec.toLowerCase());
  }
  /** Static provider descriptor; push-mode decoder (isPullMode: false). */
  async getInfo() {
    return {
      id: "decoder-nodeav",
      name: "Decoder (node-av)",
      isPullMode: false,
      priority: 10
    };
  }
  /**
   * Create a decoder session: spin up a NodeAvDecoderSession, attach a frame
   * listener that copies each decoded frame into a bounded ring buffer, and
   * register everything under a fresh sessionId.
   * @returns {{ sessionId: string, nodeId: string }}
   */
  async createSession(config) {
    const sessionId = randomUUID();
    const hwaccel = this.resolveHwAccelPref();
    const session = new NodeAvDecoderSession(config, this.ctx.logger, {
      hwaccel,
      hwaccelResolver: this.ctx.kernel.hwaccel
    });
    const ringBuffer = new RingBuffer(FRAME_BUFFER_CAPACITY);
    const unsub = session.onFrame((frame) => {
      const { format } = frame;
      // Only buffer formats the capability contract knows how to transport.
      if (format !== "jpeg" && format !== "rgb" && format !== "bgr" && format !== "yuv420" && format !== "gray") return;
      // Copy into a fresh ArrayBuffer: the decoder may reuse frame.data's
      // underlying storage after this callback returns.
      const arrayBuf = new ArrayBuffer(frame.data.byteLength);
      new Uint8Array(arrayBuf).set(frame.data);
      const capData = new Uint8Array(arrayBuf);
      const capFrame = {
        data: capData,
        width: frame.width,
        height: frame.height,
        format,
        timestamp: frame.timestamp
      };
      ringBuffer.push(capFrame);
    });
    this.sessions.set(sessionId, session);
    this.frameBuffers.set(sessionId, ringBuffer);
    this.unsubscribers.set(sessionId, unsub);
    this.sessionMeta.set(sessionId, {
      codec: config.codec,
      outputFormat: config.outputFormat,
      createdAtMs: Date.now()
    });
    this.ctx.logger.info("node-av: created session", { meta: { sessionId, codec: config.codec, hwaccelPref: hwaccel } });
    return { sessionId, nodeId: this.ctx.kernel.localNodeId ?? "local" };
  }
  /**
   * Tear down one session: unsubscribe its frame listener, destroy the
   * underlying decoder, and drop all registry entries. Registry cleanup runs
   * in `finally` so a rejecting destroy() cannot leave a zombie entry that
   * blocks future destroy attempts for the same id.
   * @throws {Error} on unknown sessionId, or rethrows session.destroy() failure
   */
  async destroySession(input) {
    const { sessionId } = input;
    const session = this.#requireSession(sessionId);
    const unsub = this.unsubscribers.get(sessionId);
    if (unsub) unsub();
    try {
      await session.destroy();
    } finally {
      this.sessions.delete(sessionId);
      this.frameBuffers.delete(sessionId);
      this.unsubscribers.delete(sessionId);
      this.sessionMeta.delete(sessionId);
    }
    this.ctx.logger.info("node-av: destroyed session", { meta: { sessionId } });
  }
  /** Snapshot of all live sessions from the metadata registry. */
  async listActiveSessions() {
    const out = [];
    for (const [sessionId, meta] of this.sessionMeta) {
      out.push({ sessionId, codec: meta.codec, outputFormat: meta.outputFormat, createdAtMs: meta.createdAtMs });
    }
    return out;
  }
  /**
   * Feed one encoded packet to a session, normalizing the payload to a
   * Buffer (zero-copy view when the input is already a Uint8Array).
   * @throws {Error} on unknown sessionId
   */
  async pushPacket(input) {
    const session = this.#requireSession(input.sessionId);
    const rawData = input.packet.data;
    const data = Buffer.isBuffer(rawData) ? rawData : rawData instanceof Uint8Array ? Buffer.from(rawData.buffer, rawData.byteOffset, rawData.byteLength) : Buffer.from(rawData);
    session.pushPacket({ ...input.packet, data });
  }
  /**
   * Ask a session to open a URL-based stream. No-op when the session
   * implementation does not support openStream.
   * @throws {Error} on unknown sessionId
   */
  async openStream(input) {
    const session = this.#requireSession(input.sessionId);
    if (session.openStream) {
      await session.openStream(input.url);
    }
  }
  /**
   * Drain up to maxCount buffered frames for a session.
   * Keyed off frameBuffers (not sessions) since the ring buffer is what's
   * being consumed here.
   * @throws {Error} on unknown sessionId
   */
  async pullFrames(input) {
    const ringBuffer = this.frameBuffers.get(input.sessionId);
    if (!ringBuffer) {
      throw new Error(`decoder-nodeav: unknown sessionId ${input.sessionId}`);
    }
    return ringBuffer.drain(input.maxCount);
  }
  /**
   * Forward a runtime config update to a session.
   * @throws {Error} on unknown sessionId
   */
  async updateConfig(input) {
    const session = this.#requireSession(input.sessionId);
    session.updateConfig(input.config);
  }
  /**
   * Return a session's decode statistics.
   * @throws {Error} on unknown sessionId
   */
  async getStats(input) {
    const session = this.#requireSession(input.sessionId);
    return session.getStats();
  }
  /**
   * Destroy every live session in parallel on addon shutdown. Each destroy
   * failure is caught and logged individually so one bad session cannot abort
   * the rest of the cleanup or leave the registries populated (the previous
   * bare Promise.all rejected fast and skipped the .clear() calls).
   */
  async onShutdown() {
    this.ctx.logger.info("node-av decoder addon shutdown \u2014 destroying all sessions");
    const destroyPromises = [];
    for (const [sessionId, session] of this.sessions) {
      const unsub = this.unsubscribers.get(sessionId);
      if (unsub) unsub();
      destroyPromises.push(
        session.destroy().catch((err) => {
          this.ctx.logger.warn("node-av: session destroy failed during shutdown", {
            meta: { sessionId, error: err instanceof Error ? err.message : String(err) }
          });
        })
      );
    }
    await Promise.all(destroyPromises);
    this.sessions.clear();
    this.frameBuffers.clear();
    this.unsubscribers.clear();
    this.sessionMeta.clear();
  }
};
|
|
916
|
+
export {
|
|
917
|
+
DecoderNodeAvAddon,
|
|
918
|
+
NodeAvDecoderSession
|
|
919
|
+
};
|
|
920
|
+
//# sourceMappingURL=index.mjs.map
|