@revizly/node-av 5.2.2-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (254)
  1. package/BUILD_LINUX.md +61 -0
  2. package/LICENSE.md +22 -0
  3. package/README.md +662 -0
  4. package/build_mac_local.sh +69 -0
  5. package/dist/api/audio-frame-buffer.d.ts +205 -0
  6. package/dist/api/audio-frame-buffer.js +287 -0
  7. package/dist/api/audio-frame-buffer.js.map +1 -0
  8. package/dist/api/bitstream-filter.d.ts +820 -0
  9. package/dist/api/bitstream-filter.js +1242 -0
  10. package/dist/api/bitstream-filter.js.map +1 -0
  11. package/dist/api/constants.d.ts +44 -0
  12. package/dist/api/constants.js +45 -0
  13. package/dist/api/constants.js.map +1 -0
  14. package/dist/api/data/test_av1.ivf +0 -0
  15. package/dist/api/data/test_h264.h264 +0 -0
  16. package/dist/api/data/test_hevc.h265 +0 -0
  17. package/dist/api/data/test_mjpeg.mjpeg +0 -0
  18. package/dist/api/data/test_vp8.ivf +0 -0
  19. package/dist/api/data/test_vp9.ivf +0 -0
  20. package/dist/api/decoder.d.ts +1088 -0
  21. package/dist/api/decoder.js +1775 -0
  22. package/dist/api/decoder.js.map +1 -0
  23. package/dist/api/demuxer.d.ts +1219 -0
  24. package/dist/api/demuxer.js +2081 -0
  25. package/dist/api/demuxer.js.map +1 -0
  26. package/dist/api/device.d.ts +586 -0
  27. package/dist/api/device.js +961 -0
  28. package/dist/api/device.js.map +1 -0
  29. package/dist/api/encoder.d.ts +1132 -0
  30. package/dist/api/encoder.js +1988 -0
  31. package/dist/api/encoder.js.map +1 -0
  32. package/dist/api/filter-complex.d.ts +821 -0
  33. package/dist/api/filter-complex.js +1604 -0
  34. package/dist/api/filter-complex.js.map +1 -0
  35. package/dist/api/filter-presets.d.ts +1286 -0
  36. package/dist/api/filter-presets.js +2152 -0
  37. package/dist/api/filter-presets.js.map +1 -0
  38. package/dist/api/filter.d.ts +1234 -0
  39. package/dist/api/filter.js +1976 -0
  40. package/dist/api/filter.js.map +1 -0
  41. package/dist/api/fmp4-stream.d.ts +426 -0
  42. package/dist/api/fmp4-stream.js +739 -0
  43. package/dist/api/fmp4-stream.js.map +1 -0
  44. package/dist/api/hardware.d.ts +651 -0
  45. package/dist/api/hardware.js +1260 -0
  46. package/dist/api/hardware.js.map +1 -0
  47. package/dist/api/index.d.ts +17 -0
  48. package/dist/api/index.js +32 -0
  49. package/dist/api/index.js.map +1 -0
  50. package/dist/api/io-stream.d.ts +307 -0
  51. package/dist/api/io-stream.js +282 -0
  52. package/dist/api/io-stream.js.map +1 -0
  53. package/dist/api/muxer.d.ts +957 -0
  54. package/dist/api/muxer.js +2002 -0
  55. package/dist/api/muxer.js.map +1 -0
  56. package/dist/api/pipeline.d.ts +607 -0
  57. package/dist/api/pipeline.js +1145 -0
  58. package/dist/api/pipeline.js.map +1 -0
  59. package/dist/api/utilities/async-queue.d.ts +120 -0
  60. package/dist/api/utilities/async-queue.js +211 -0
  61. package/dist/api/utilities/async-queue.js.map +1 -0
  62. package/dist/api/utilities/audio-sample.d.ts +117 -0
  63. package/dist/api/utilities/audio-sample.js +112 -0
  64. package/dist/api/utilities/audio-sample.js.map +1 -0
  65. package/dist/api/utilities/channel-layout.d.ts +76 -0
  66. package/dist/api/utilities/channel-layout.js +80 -0
  67. package/dist/api/utilities/channel-layout.js.map +1 -0
  68. package/dist/api/utilities/electron-shared-texture.d.ts +328 -0
  69. package/dist/api/utilities/electron-shared-texture.js +503 -0
  70. package/dist/api/utilities/electron-shared-texture.js.map +1 -0
  71. package/dist/api/utilities/image.d.ts +207 -0
  72. package/dist/api/utilities/image.js +213 -0
  73. package/dist/api/utilities/image.js.map +1 -0
  74. package/dist/api/utilities/index.d.ts +12 -0
  75. package/dist/api/utilities/index.js +25 -0
  76. package/dist/api/utilities/index.js.map +1 -0
  77. package/dist/api/utilities/media-type.d.ts +49 -0
  78. package/dist/api/utilities/media-type.js +53 -0
  79. package/dist/api/utilities/media-type.js.map +1 -0
  80. package/dist/api/utilities/pixel-format.d.ts +89 -0
  81. package/dist/api/utilities/pixel-format.js +97 -0
  82. package/dist/api/utilities/pixel-format.js.map +1 -0
  83. package/dist/api/utilities/sample-format.d.ts +129 -0
  84. package/dist/api/utilities/sample-format.js +141 -0
  85. package/dist/api/utilities/sample-format.js.map +1 -0
  86. package/dist/api/utilities/scheduler.d.ts +138 -0
  87. package/dist/api/utilities/scheduler.js +98 -0
  88. package/dist/api/utilities/scheduler.js.map +1 -0
  89. package/dist/api/utilities/streaming.d.ts +186 -0
  90. package/dist/api/utilities/streaming.js +309 -0
  91. package/dist/api/utilities/streaming.js.map +1 -0
  92. package/dist/api/utilities/timestamp.d.ts +193 -0
  93. package/dist/api/utilities/timestamp.js +206 -0
  94. package/dist/api/utilities/timestamp.js.map +1 -0
  95. package/dist/api/utilities/whisper-model.d.ts +310 -0
  96. package/dist/api/utilities/whisper-model.js +528 -0
  97. package/dist/api/utilities/whisper-model.js.map +1 -0
  98. package/dist/api/utils.d.ts +19 -0
  99. package/dist/api/utils.js +39 -0
  100. package/dist/api/utils.js.map +1 -0
  101. package/dist/api/whisper.d.ts +324 -0
  102. package/dist/api/whisper.js +362 -0
  103. package/dist/api/whisper.js.map +1 -0
  104. package/dist/constants/channel-layouts.d.ts +53 -0
  105. package/dist/constants/channel-layouts.js +57 -0
  106. package/dist/constants/channel-layouts.js.map +1 -0
  107. package/dist/constants/constants.d.ts +2325 -0
  108. package/dist/constants/constants.js +1887 -0
  109. package/dist/constants/constants.js.map +1 -0
  110. package/dist/constants/decoders.d.ts +633 -0
  111. package/dist/constants/decoders.js +641 -0
  112. package/dist/constants/decoders.js.map +1 -0
  113. package/dist/constants/encoders.d.ts +295 -0
  114. package/dist/constants/encoders.js +308 -0
  115. package/dist/constants/encoders.js.map +1 -0
  116. package/dist/constants/hardware.d.ts +26 -0
  117. package/dist/constants/hardware.js +27 -0
  118. package/dist/constants/hardware.js.map +1 -0
  119. package/dist/constants/index.d.ts +5 -0
  120. package/dist/constants/index.js +6 -0
  121. package/dist/constants/index.js.map +1 -0
  122. package/dist/ffmpeg/index.d.ts +99 -0
  123. package/dist/ffmpeg/index.js +115 -0
  124. package/dist/ffmpeg/index.js.map +1 -0
  125. package/dist/ffmpeg/utils.d.ts +31 -0
  126. package/dist/ffmpeg/utils.js +68 -0
  127. package/dist/ffmpeg/utils.js.map +1 -0
  128. package/dist/ffmpeg/version.d.ts +6 -0
  129. package/dist/ffmpeg/version.js +7 -0
  130. package/dist/ffmpeg/version.js.map +1 -0
  131. package/dist/index.d.ts +4 -0
  132. package/dist/index.js +9 -0
  133. package/dist/index.js.map +1 -0
  134. package/dist/lib/audio-fifo.d.ts +399 -0
  135. package/dist/lib/audio-fifo.js +431 -0
  136. package/dist/lib/audio-fifo.js.map +1 -0
  137. package/dist/lib/binding.d.ts +228 -0
  138. package/dist/lib/binding.js +60 -0
  139. package/dist/lib/binding.js.map +1 -0
  140. package/dist/lib/bitstream-filter-context.d.ts +379 -0
  141. package/dist/lib/bitstream-filter-context.js +441 -0
  142. package/dist/lib/bitstream-filter-context.js.map +1 -0
  143. package/dist/lib/bitstream-filter.d.ts +140 -0
  144. package/dist/lib/bitstream-filter.js +154 -0
  145. package/dist/lib/bitstream-filter.js.map +1 -0
  146. package/dist/lib/codec-context.d.ts +1071 -0
  147. package/dist/lib/codec-context.js +1354 -0
  148. package/dist/lib/codec-context.js.map +1 -0
  149. package/dist/lib/codec-parameters.d.ts +616 -0
  150. package/dist/lib/codec-parameters.js +761 -0
  151. package/dist/lib/codec-parameters.js.map +1 -0
  152. package/dist/lib/codec-parser.d.ts +201 -0
  153. package/dist/lib/codec-parser.js +213 -0
  154. package/dist/lib/codec-parser.js.map +1 -0
  155. package/dist/lib/codec.d.ts +586 -0
  156. package/dist/lib/codec.js +713 -0
  157. package/dist/lib/codec.js.map +1 -0
  158. package/dist/lib/device.d.ts +291 -0
  159. package/dist/lib/device.js +324 -0
  160. package/dist/lib/device.js.map +1 -0
  161. package/dist/lib/dictionary.d.ts +333 -0
  162. package/dist/lib/dictionary.js +372 -0
  163. package/dist/lib/dictionary.js.map +1 -0
  164. package/dist/lib/error.d.ts +242 -0
  165. package/dist/lib/error.js +303 -0
  166. package/dist/lib/error.js.map +1 -0
  167. package/dist/lib/fifo.d.ts +416 -0
  168. package/dist/lib/fifo.js +453 -0
  169. package/dist/lib/fifo.js.map +1 -0
  170. package/dist/lib/filter-context.d.ts +712 -0
  171. package/dist/lib/filter-context.js +789 -0
  172. package/dist/lib/filter-context.js.map +1 -0
  173. package/dist/lib/filter-graph-segment.d.ts +160 -0
  174. package/dist/lib/filter-graph-segment.js +171 -0
  175. package/dist/lib/filter-graph-segment.js.map +1 -0
  176. package/dist/lib/filter-graph.d.ts +641 -0
  177. package/dist/lib/filter-graph.js +704 -0
  178. package/dist/lib/filter-graph.js.map +1 -0
  179. package/dist/lib/filter-inout.d.ts +198 -0
  180. package/dist/lib/filter-inout.js +257 -0
  181. package/dist/lib/filter-inout.js.map +1 -0
  182. package/dist/lib/filter.d.ts +243 -0
  183. package/dist/lib/filter.js +272 -0
  184. package/dist/lib/filter.js.map +1 -0
  185. package/dist/lib/format-context.d.ts +1254 -0
  186. package/dist/lib/format-context.js +1379 -0
  187. package/dist/lib/format-context.js.map +1 -0
  188. package/dist/lib/frame-utils.d.ts +116 -0
  189. package/dist/lib/frame-utils.js +98 -0
  190. package/dist/lib/frame-utils.js.map +1 -0
  191. package/dist/lib/frame.d.ts +1222 -0
  192. package/dist/lib/frame.js +1435 -0
  193. package/dist/lib/frame.js.map +1 -0
  194. package/dist/lib/hardware-device-context.d.ts +362 -0
  195. package/dist/lib/hardware-device-context.js +383 -0
  196. package/dist/lib/hardware-device-context.js.map +1 -0
  197. package/dist/lib/hardware-frames-context.d.ts +419 -0
  198. package/dist/lib/hardware-frames-context.js +477 -0
  199. package/dist/lib/hardware-frames-context.js.map +1 -0
  200. package/dist/lib/index.d.ts +35 -0
  201. package/dist/lib/index.js +60 -0
  202. package/dist/lib/index.js.map +1 -0
  203. package/dist/lib/input-format.d.ts +249 -0
  204. package/dist/lib/input-format.js +306 -0
  205. package/dist/lib/input-format.js.map +1 -0
  206. package/dist/lib/io-context.d.ts +696 -0
  207. package/dist/lib/io-context.js +769 -0
  208. package/dist/lib/io-context.js.map +1 -0
  209. package/dist/lib/log.d.ts +174 -0
  210. package/dist/lib/log.js +184 -0
  211. package/dist/lib/log.js.map +1 -0
  212. package/dist/lib/native-types.d.ts +946 -0
  213. package/dist/lib/native-types.js +2 -0
  214. package/dist/lib/native-types.js.map +1 -0
  215. package/dist/lib/option.d.ts +927 -0
  216. package/dist/lib/option.js +1583 -0
  217. package/dist/lib/option.js.map +1 -0
  218. package/dist/lib/output-format.d.ts +180 -0
  219. package/dist/lib/output-format.js +213 -0
  220. package/dist/lib/output-format.js.map +1 -0
  221. package/dist/lib/packet.d.ts +501 -0
  222. package/dist/lib/packet.js +590 -0
  223. package/dist/lib/packet.js.map +1 -0
  224. package/dist/lib/rational.d.ts +251 -0
  225. package/dist/lib/rational.js +278 -0
  226. package/dist/lib/rational.js.map +1 -0
  227. package/dist/lib/software-resample-context.d.ts +552 -0
  228. package/dist/lib/software-resample-context.js +592 -0
  229. package/dist/lib/software-resample-context.js.map +1 -0
  230. package/dist/lib/software-scale-context.d.ts +344 -0
  231. package/dist/lib/software-scale-context.js +366 -0
  232. package/dist/lib/software-scale-context.js.map +1 -0
  233. package/dist/lib/stream.d.ts +379 -0
  234. package/dist/lib/stream.js +526 -0
  235. package/dist/lib/stream.js.map +1 -0
  236. package/dist/lib/sync-queue.d.ts +179 -0
  237. package/dist/lib/sync-queue.js +197 -0
  238. package/dist/lib/sync-queue.js.map +1 -0
  239. package/dist/lib/types.d.ts +34 -0
  240. package/dist/lib/types.js +2 -0
  241. package/dist/lib/types.js.map +1 -0
  242. package/dist/lib/utilities.d.ts +1127 -0
  243. package/dist/lib/utilities.js +1225 -0
  244. package/dist/lib/utilities.js.map +1 -0
  245. package/dist/utils/electron.d.ts +49 -0
  246. package/dist/utils/electron.js +63 -0
  247. package/dist/utils/electron.js.map +1 -0
  248. package/dist/utils/index.d.ts +4 -0
  249. package/dist/utils/index.js +5 -0
  250. package/dist/utils/index.js.map +1 -0
  251. package/install/check.js +121 -0
  252. package/install/ffmpeg.js +66 -0
  253. package/jellyfin-ffmpeg.patch +181 -0
  254. package/package.json +129 -0
@@ -0,0 +1,2081 @@
1
// TypeScript emit helper (tslib `__addDisposableResource`): registers `value`
// on the disposal environment `env` so `using` / `await using` declarations can
// dispose it on scope exit. Generated code — kept verbatim; comments are review-only.
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
    if (value !== null && value !== void 0) {
        // Only objects/functions can carry Symbol.dispose / Symbol.asyncDispose.
        if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
        var dispose, inner;
        if (async) {
            if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
            dispose = value[Symbol.asyncDispose];
        }
        if (dispose === void 0) {
            // Fall back to the sync disposer; for `await using` it is wrapped below.
            if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
            dispose = value[Symbol.dispose];
            if (async) inner = dispose;
        }
        if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
        // Wrap a sync disposer so a throw surfaces as a rejected promise during async disposal.
        if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
        env.stack.push({ value: value, dispose: dispose, async: async });
    }
    else if (async) {
        // Push an async marker even for null/undefined so disposal ordering is preserved.
        env.stack.push({ async: true });
    }
    return value;
};
23
// TypeScript emit helper (tslib `__disposeResources`): unwinds the disposal stack
// built by `__addDisposableResource` when a `using` scope exits, aggregating
// multiple disposal failures via SuppressedError.
// Generated code — kept verbatim; comments are review-only.
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
    return function (env) {
        function fail(e) {
            // A later failure suppresses the earlier one; chain them for diagnostics.
            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
            env.hasError = true;
        }
        var r, s = 0;
        function next() {
            while (r = env.stack.pop()) {
                try {
                    // A sync entry encountered after an async marker (s bit 0 set):
                    // defer it to a microtask so ordering relative to async disposal holds.
                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
                    if (r.dispose) {
                        var result = r.dispose.call(r.value);
                        // Async disposer: await it; record rejection, then keep unwinding.
                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
                    }
                    else s |= 1; // disposer-less async marker entry
                }
                catch (e) {
                    fail(e);
                }
            }
            // Once any async step ran, the overall result must be a promise.
            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
            if (env.hasError) throw env.error;
        }
        return next();
    };
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    // Fallback SuppressedError for runtimes predating the ES2023+ global.
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
});
53
+ import { createSocket } from 'dgram';
54
+ import { closeSync, openSync, readSync } from 'fs';
55
+ import { open } from 'fs/promises';
56
+ import { Readable } from 'node:stream';
57
+ import { resolve } from 'path';
58
+ import { AV_NOPTS_VALUE, AV_PIX_FMT_NONE, AV_ROUND_NEAR_INF, AV_ROUND_PASS_MINMAX, AV_TIME_BASE, AV_TIME_BASE_Q, AVFLAG_NONE, AVFMT_FLAG_CUSTOM_IO, AVFMT_FLAG_NONBLOCK, AVFMT_TS_DISCONT, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO, AVSEEK_CUR, AVSEEK_END, AVSEEK_SET, } from '../constants/constants.js';
59
+ import { Dictionary } from '../lib/dictionary.js';
60
+ import { AVERROR_EAGAIN, FFmpegError } from '../lib/error.js';
61
+ import { FormatContext } from '../lib/format-context.js';
62
+ import { InputFormat } from '../lib/input-format.js';
63
+ import { IOContext } from '../lib/io-context.js';
64
+ import { Packet } from '../lib/packet.js';
65
+ import { Rational } from '../lib/rational.js';
66
+ import { avGetPixFmtName, avGetSampleFmtName, avRescaleQ, avRescaleQRnd, dtsPredict as nativeDtsPredict } from '../lib/utilities.js';
67
+ import { DELTA_THRESHOLD, DTS_ERROR_THRESHOLD, IO_BUFFER_SIZE, MAX_INPUT_QUEUE_SIZE } from './constants.js';
68
+ import { IOStream } from './io-stream.js';
69
+ import { StreamingUtils } from './utilities/streaming.js';
70
+ /**
71
+ * High-level demuxer for reading and demuxing media files.
72
+ *
73
+ * Provides simplified access to media streams, packets, and metadata.
74
+ * Handles file opening, format detection, and stream information extraction.
75
+ * Supports files, URLs, buffers, and raw data input with automatic cleanup.
76
+ * Essential component for media processing pipelines and transcoding.
77
+ *
78
+ * @example
79
+ * ```typescript
80
+ * import { Demuxer } from 'node-av/api';
81
+ *
82
+ * // Open media file
83
+ * await using input = await Demuxer.open('video.mp4');
84
+ * console.log(`Format: ${input.formatName}`);
85
+ * console.log(`Duration: ${input.duration}s`);
86
+ *
87
+ * // Process packets
88
+ * for await (const packet of input.packets()) {
89
+ * console.log(`Packet from stream ${packet.streamIndex}`);
90
+ * packet.free();
91
+ * }
92
+ * ```
93
+ *
94
+ * @example
95
+ * ```typescript
96
+ * // From buffer
97
+ * const buffer = await fs.readFile('video.mp4');
98
+ * await using input = await Demuxer.open(buffer);
99
+ *
100
+ * // Access streams
101
+ * const videoStream = input.video();
102
+ * const audioStream = input.audio();
103
+ * ```
104
+ *
105
+ * @see {@link Muxer} For writing media files
106
+ * @see {@link Decoder} For decoding packets to frames
107
+ * @see {@link FormatContext} For low-level API
108
+ */
109
+ export class Demuxer {
110
// ---- Core demuxing state ----
formatContext; // Low-level FormatContext this demuxer wraps
_streams = []; // Snapshot of formatContext.streams taken at construction
ioContext; // Custom IOContext when input came from a buffer/stream/callbacks (undefined for plain files/URLs)
isClosed = false; // Guards against double-close / use-after-close
options; // Fully-defaulted options object built by open()/openSync()
// Timestamp processing state (per-stream)
streamStates = new Map();
// Timestamp discontinuity tracking (global)
tsOffsetDiscont = 0n; // Cumulative BigInt timestamp offset applied after discontinuities
lastTs = AV_NOPTS_VALUE; // Last timestamp observed; AV_NOPTS_VALUE until the first one
// Demux manager for handling multiple parallel packet generators
activeGenerators = 0; // Live packet-generator count sharing the demux loop
demuxThread = null; // Promise of the background demux loop while it runs
packetQueues = new Map(); // streamIndex or 'all' -> queue
queueResolvers = new Map(); // Promise resolvers for waiting consumers
demuxThreadActive = false; // True while the shared demux loop is running
demuxEof = false; // True once the demux loop has reached end-of-input
// ---- Abort support ----
signal; // Optional AbortSignal supplied via open() options
signalCleanup; // Teardown callback for the abort listener, if one was attached
129
+ /**
130
+ * @param formatContext - Opened format context
131
+ *
132
+ * @param options - Media input options
133
+ *
134
+ * @param ioContext - Optional IO context for custom I/O (e.g., from Buffer)
135
+ *
136
+ * @internal
137
+ */
138
+ constructor(formatContext, options, ioContext) {
139
+ this.formatContext = formatContext;
140
+ this.ioContext = ioContext;
141
+ this._streams = formatContext.streams ?? [];
142
+ this.options = options;
143
+ }
144
+ /**
145
+ * Probe media format without fully opening the file.
146
+ *
147
+ * Detects format by analyzing file headers and content.
148
+ * Useful for format validation before processing.
149
+ *
150
+ * Direct mapping to av_probe_input_format().
151
+ *
152
+ * @param input - File path or buffer to probe
153
+ *
154
+ * @returns Format information or null if unrecognized
155
+ *
156
+ * @example
157
+ * ```typescript
158
+ * const info = await Demuxer.probeFormat('video.mp4');
159
+ * if (info) {
160
+ * console.log(`Format: ${info.format}`);
161
+ * console.log(`Confidence: ${info.confidence}%`);
162
+ * }
163
+ * ```
164
+ *
165
+ * @example
166
+ * ```typescript
167
+ * // Probe from buffer
168
+ * const buffer = await fs.readFile('video.webm');
169
+ * const info = await Demuxer.probeFormat(buffer);
170
+ * console.log(`MIME type: ${info?.mimeType}`);
171
+ * ```
172
+ *
173
+ * @see {@link InputFormat.probe} For low-level probing
174
+ */
175
+ static async probeFormat(input) {
176
+ try {
177
+ if (Buffer.isBuffer(input)) {
178
+ // Probe from buffer
179
+ const format = InputFormat.probe(input);
180
+ if (!format) {
181
+ return null;
182
+ }
183
+ return {
184
+ format: format.name ?? 'unknown',
185
+ longName: format.longName ?? undefined,
186
+ extensions: format.extensions ?? undefined,
187
+ mimeType: format.mimeType ?? undefined,
188
+ confidence: 100, // Direct probe always has high confidence
189
+ };
190
+ }
191
+ else {
192
+ // For files, read first part and probe
193
+ let fileHandle;
194
+ try {
195
+ fileHandle = await open(input, 'r');
196
+ // Read first 64KB for probing
197
+ const buffer = Buffer.alloc(65536);
198
+ const { bytesRead } = await fileHandle.read(buffer, 0, 65536, 0);
199
+ const probeBuffer = buffer.subarray(0, bytesRead);
200
+ const format = InputFormat.probe(probeBuffer, input);
201
+ if (!format) {
202
+ return null;
203
+ }
204
+ return {
205
+ format: format.name ?? 'unknown',
206
+ longName: format.longName ?? undefined,
207
+ extensions: format.extensions ?? undefined,
208
+ mimeType: format.mimeType ?? undefined,
209
+ confidence: 90, // File-based probe with filename hint
210
+ };
211
+ }
212
+ catch {
213
+ // If file reading fails, return null
214
+ return null;
215
+ }
216
+ finally {
217
+ await fileHandle?.close();
218
+ }
219
+ }
220
+ }
221
+ catch {
222
+ return null;
223
+ }
224
+ }
225
+ /**
226
+ * Probe media format without fully opening the file synchronously.
227
+ * Synchronous version of probeFormat.
228
+ *
229
+ * Detects format by analyzing file headers and content.
230
+ * Useful for format validation before processing.
231
+ *
232
+ * Direct mapping to av_probe_input_format().
233
+ *
234
+ * @param input - File path or buffer to probe
235
+ *
236
+ * @returns Format information or null if unrecognized
237
+ *
238
+ * @example
239
+ * ```typescript
240
+ * const info = Demuxer.probeFormatSync('video.mp4');
241
+ * if (info) {
242
+ * console.log(`Format: ${info.format}`);
243
+ * console.log(`Confidence: ${info.confidence}%`);
244
+ * }
245
+ * ```
246
+ *
247
+ * @example
248
+ * ```typescript
249
+ * // Probe from buffer
250
+ * const buffer = fs.readFileSync('video.webm');
251
+ * const info = Demuxer.probeFormatSync(buffer);
252
+ * console.log(`MIME type: ${info?.mimeType}`);
253
+ * ```
254
+ *
255
+ * @see {@link probeFormat} For async version
256
+ */
257
+ static probeFormatSync(input) {
258
+ try {
259
+ if (Buffer.isBuffer(input)) {
260
+ // Probe from buffer
261
+ const format = InputFormat.probe(input);
262
+ if (!format) {
263
+ return null;
264
+ }
265
+ return {
266
+ format: format.name ?? 'unknown',
267
+ longName: format.longName ?? undefined,
268
+ extensions: format.extensions ?? undefined,
269
+ mimeType: format.mimeType ?? undefined,
270
+ confidence: 100, // Direct probe always has high confidence
271
+ };
272
+ }
273
+ else {
274
+ // For files, read first part and probe
275
+ let fd;
276
+ try {
277
+ fd = openSync(input, 'r');
278
+ // Read first 64KB for probing
279
+ const buffer = Buffer.alloc(65536);
280
+ const bytesRead = readSync(fd, buffer, 0, 65536, 0);
281
+ const probeBuffer = buffer.subarray(0, bytesRead);
282
+ const format = InputFormat.probe(probeBuffer, input);
283
+ if (!format) {
284
+ return null;
285
+ }
286
+ return {
287
+ format: format.name ?? 'unknown',
288
+ longName: format.longName ?? undefined,
289
+ extensions: format.extensions ?? undefined,
290
+ mimeType: format.mimeType ?? undefined,
291
+ confidence: 90, // File-based probe with filename hint
292
+ };
293
+ }
294
+ catch {
295
+ // If file reading fails, return null
296
+ return null;
297
+ }
298
+ finally {
299
+ if (fd !== undefined)
300
+ closeSync(fd);
301
+ }
302
+ }
303
+ }
304
+ catch {
305
+ return null;
306
+ }
307
+ }
308
/**
 * Open a media input and build a fully-initialized Demuxer.
 *
 * Accepts a file path/URL, Buffer, native IOContext, Readable stream,
 * read/seek callback object, or a raw video/audio descriptor (an object
 * with `type` plus `width`/`sampleRate`), and wires up the appropriate
 * I/O before calling the low-level openInput.
 *
 * @param input - Media source (see above)
 * @param options - Open options (format, bufferSize, skipStreamInfo, signal, ...)
 * @returns A ready Demuxer
 * @throws Error / TypeError on unknown format, empty buffer, missing
 *   required `format`, or unsupported input type; FFmpeg failures are
 *   rethrown via FFmpegError.throwIfError.
 */
static async open(input, options = {}) {
    // Check if input is raw data (descriptor object carrying type + dimensions/rate)
    if (typeof input === 'object' && 'type' in input && ('width' in input || 'sampleRate' in input)) {
        // Build options for raw data: pick rawvideo/s16le format and translate
        // the descriptor fields into FFmpeg demuxer options.
        // NOTE(review): this rebuilds `options` from scratch, so fields such as
        // `signal`, `skipStreamInfo`, `startWithKeyframe`, `copyTs` and the DTS
        // thresholds passed by the caller are silently dropped for raw inputs —
        // confirm whether that is intentional.
        const rawOptions = {
            bufferSize: options.bufferSize,
            format: options.format ?? (input.type === 'video' ? 'rawvideo' : 's16le'),
            options: {
                ...options.options,
            },
        };
        if (input.type === 'video') {
            rawOptions.options = {
                ...rawOptions.options,
                video_size: `${input.width}x${input.height}`,
                pixel_format: avGetPixFmtName(input.pixelFormat) ?? 'yuv420p',
                framerate: new Rational(input.frameRate.num, input.frameRate.den).toString(),
            };
        }
        else {
            rawOptions.options = {
                ...rawOptions.options,
                sample_rate: input.sampleRate,
                channels: input.channels,
                sample_fmt: avGetSampleFmtName(input.sampleFormat) ?? 's16le',
            };
        }
        // From here on, treat the descriptor's inner `input` as the actual source.
        input = input.input;
        options = rawOptions;
    }
    // Original implementation for non-raw data
    const formatContext = new FormatContext();
    let ioContext;
    let optionsDict = null;
    let inputFormat = null;
    try {
        // Create options dictionary if options are provided
        if (options.options) {
            optionsDict = Dictionary.fromObject(options.options);
        }
        // Find input format if specified
        if (options.format) {
            inputFormat = InputFormat.findInputFormat(options.format);
            if (!inputFormat) {
                throw new Error(`Input format '${options.format}' not found`);
            }
        }
        if (typeof input === 'string') {
            // File path or URL - resolve relative paths to absolute.
            // Skip path resolution for URLs and for explicit-format (device) inputs.
            const isUrl = /^[a-zA-Z][a-zA-Z0-9+.-]*:\/\//.test(input);
            const resolvedInput = isUrl || inputFormat ? input : resolve(input);
            const ret = await formatContext.openInput(resolvedInput, inputFormat, optionsDict);
            FFmpegError.throwIfError(ret, 'Failed to open input');
            formatContext.setFlags(AVFMT_FLAG_NONBLOCK);
        }
        else if (Buffer.isBuffer(input)) {
            // Validate buffer is not empty
            if (input.length === 0) {
                throw new Error('Cannot open media from empty buffer');
            }
            // From buffer - allocate context first for custom I/O
            formatContext.allocContext();
            ioContext = IOStream.create(input, { bufferSize: options.bufferSize });
            formatContext.pb = ioContext;
            const ret = await formatContext.openInput('', inputFormat, optionsDict);
            FFmpegError.throwIfError(ret, 'Failed to open input from buffer');
        }
        else if (input instanceof IOContext) {
            // Caller-provided native IOContext; format is mandatory since probing is skipped.
            if (!options.format) {
                throw new Error('Format must be specified for native IOContext input');
            }
            formatContext.allocContext();
            ioContext = input;
            formatContext.pb = ioContext;
            formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
            const ret = await formatContext.openInput('', inputFormat, optionsDict);
            FFmpegError.throwIfError(ret, 'Failed to open input from native IOContext');
        }
        else if (input instanceof Readable) {
            // Readable stream - format is required
            if (!options.format) {
                throw new Error('Format must be specified for Readable stream input');
            }
            formatContext.allocContext();
            ioContext = IOStream.create(input, { bufferSize: options.bufferSize });
            formatContext.pb = ioContext;
            formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
            const ret = await formatContext.openInput('', inputFormat, optionsDict);
            FFmpegError.throwIfError(ret, 'Failed to open input from Readable stream');
        }
        else if (typeof input === 'object' && 'read' in input) {
            // Custom I/O with callbacks - format is required
            if (!options.format) {
                throw new Error('Format must be specified for custom I/O');
            }
            // Allocate context first for custom I/O
            formatContext.allocContext();
            // Setup custom I/O with callbacks (read mandatory, seek optional, no write)
            ioContext = new IOContext();
            ioContext.allocContextWithCallbacks(options.bufferSize ?? IO_BUFFER_SIZE, 0, input.read, null, input.seek);
            formatContext.pb = ioContext;
            formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
            const ret = await formatContext.openInput('', inputFormat, optionsDict);
            FFmpegError.throwIfError(ret, 'Failed to open input from custom I/O');
        }
        else {
            throw new TypeError('Invalid input type. Expected file path, URL, Buffer, IOContext, Readable, or IOInputCallbacks');
        }
        // Find stream information
        if (!options.skipStreamInfo) {
            const ret = await formatContext.findStreamInfo(null);
            FFmpegError.throwIfError(ret, 'Failed to find stream info');
            // Try to parse extradata for video streams with missing dimensions
            for (const stream of formatContext.streams ?? []) {
                if (stream.codecpar.codecType === AVMEDIA_TYPE_VIDEO) {
                    const dimensionsMissing = stream.codecpar.width === 0 || stream.codecpar.height === 0;
                    const invalidFormat = stream.codecpar.format === AV_PIX_FMT_NONE;
                    const invalidRate = stream.codecpar.frameRate.num === 0 || stream.codecpar.frameRate.den === 0;
                    const needsParsing = dimensionsMissing || invalidFormat || invalidRate;
                    if (needsParsing && stream.codecpar.extradataSize > 0) {
                        stream.codecpar.parseExtradata();
                    }
                }
            }
        }
        // Determine buffer size
        let bufferSize = options.bufferSize ?? IO_BUFFER_SIZE;
        if (!ioContext && formatContext.iformat && formatContext.pb) {
            // Check if this is a streaming input (like RTSP, HTTP, etc.)
            const isStreaming = formatContext.pb.seekable === 0;
            if (isStreaming) {
                bufferSize *= 2; // double buffer size for streaming inputs
            }
        }
        // Apply defaults to options so the Demuxer always holds a complete set
        const fullOptions = {
            bufferSize,
            format: options.format ?? '',
            skipStreamInfo: options.skipStreamInfo ?? false,
            startWithKeyframe: options.startWithKeyframe ?? false,
            dtsDeltaThreshold: options.dtsDeltaThreshold ?? DELTA_THRESHOLD,
            dtsErrorThreshold: options.dtsErrorThreshold ?? DTS_ERROR_THRESHOLD,
            copyTs: options.copyTs ?? false,
            options: options.options ?? {},
        };
        const demuxer = new Demuxer(formatContext, fullOptions, ioContext);
        if (options.signal) {
            // Fail fast if the caller already aborted before open completed.
            options.signal.throwIfAborted();
            demuxer.signal = options.signal;
        }
        return demuxer;
    }
    catch (error) {
        // Clean up only on error
        if (ioContext) {
            // Clear the pb reference first so closeInput does not touch freed memory
            formatContext.pb = null;
            // Free the IOContext (for both custom I/O and buffer-based I/O)
            // NOTE(review): this also frees a caller-supplied native IOContext —
            // confirm that taking ownership on failure is the intended contract.
            ioContext.freeContext();
        }
        // Clean up FormatContext
        await formatContext.closeInput();
        throw error;
    }
    finally {
        // Clean up options dictionary regardless of outcome
        if (optionsDict) {
            optionsDict.free();
        }
    }
}
481
    /**
     * Open media input for demuxing synchronously.
     * Synchronous version of open.
     *
     * Accepts a file path/URL, Buffer, native IOContext, Readable stream,
     * custom I/O callbacks, or a raw video/audio descriptor object.
     *
     * @param input - Media source (path, URL, Buffer, IOContext, Readable, callbacks, or raw-data descriptor)
     * @param options - Demuxer options (format, bufferSize, skipStreamInfo, signal, ...)
     *
     * @returns Configured Demuxer instance
     *
     * @throws {Error} If the input type is unsupported, the format is unknown, or required options are missing
     * @throws {FFmpegError} If FFmpeg fails to open the input or probe streams
     */
    static openSync(input, options = {}) {
        // Check if input is raw data (descriptor object with a media type plus
        // video dimensions or an audio sample rate)
        if (typeof input === 'object' && 'type' in input && ('width' in input || 'sampleRate' in input)) {
            // Build options for raw data: translate the descriptor into the
            // demuxer options FFmpeg's rawvideo/s16le demuxers expect
            const rawOptions = {
                bufferSize: options.bufferSize,
                format: options.format ?? (input.type === 'video' ? 'rawvideo' : 's16le'),
                options: {
                    ...options.options,
                },
            };
            if (input.type === 'video') {
                rawOptions.options = {
                    ...rawOptions.options,
                    video_size: `${input.width}x${input.height}`,
                    pixel_format: avGetPixFmtName(input.pixelFormat) ?? 'yuv420p',
                    framerate: new Rational(input.frameRate.num, input.frameRate.den).toString(),
                };
            }
            else {
                rawOptions.options = {
                    ...rawOptions.options,
                    sample_rate: input.sampleRate,
                    channels: input.channels,
                    sample_fmt: avGetSampleFmtName(input.sampleFormat) ?? 's16le',
                };
            }
            // Unwrap the descriptor and continue with the inner source below
            input = input.input;
            options = rawOptions;
        }
        // Original implementation for non-raw data
        const formatContext = new FormatContext();
        let ioContext;
        let optionsDict = null;
        let inputFormat = null;
        try {
            // Create options dictionary if options are provided
            if (options.options) {
                optionsDict = Dictionary.fromObject(options.options);
            }
            // Find input format if specified
            if (options.format) {
                inputFormat = InputFormat.findInputFormat(options.format);
                if (!inputFormat) {
                    throw new Error(`Input format '${options.format}' not found`);
                }
            }
            if (typeof input === 'string') {
                // File path or URL - resolve relative paths to absolute
                // Skip path resolution for device inputs
                // Check if it's a URL (starts with protocol://) or a file path
                const isUrl = /^[a-zA-Z][a-zA-Z0-9+.-]*:\/\//.test(input);
                const resolvedInput = isUrl || inputFormat ? input : resolve(input);
                const ret = formatContext.openInputSync(resolvedInput, inputFormat, optionsDict);
                FFmpegError.throwIfError(ret, 'Failed to open input');
                formatContext.setFlags(AVFMT_FLAG_NONBLOCK);
            }
            else if (Buffer.isBuffer(input)) {
                // Validate buffer is not empty
                if (input.length === 0) {
                    throw new Error('Cannot open media from empty buffer');
                }
                // From buffer - allocate context first for custom I/O
                // (pb must be assigned before openInputSync)
                formatContext.allocContext();
                ioContext = IOStream.create(input, { bufferSize: options.bufferSize });
                formatContext.pb = ioContext;
                const ret = formatContext.openInputSync('', inputFormat, optionsDict);
                FFmpegError.throwIfError(ret, 'Failed to open input from buffer');
            }
            else if (input instanceof IOContext) {
                if (!options.format) {
                    throw new Error('Format must be specified for native IOContext input');
                }
                formatContext.allocContext();
                // Caller-provided context: adopted as-is, but freed on error below
                ioContext = input;
                formatContext.pb = ioContext;
                formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
                const ret = formatContext.openInputSync('', inputFormat, optionsDict);
                FFmpegError.throwIfError(ret, 'Failed to open input from native IOContext');
            }
            else if (input instanceof Readable) {
                // Readable stream - format is required (no probing from a stream)
                if (!options.format) {
                    throw new Error('Format must be specified for Readable stream input');
                }
                formatContext.allocContext();
                ioContext = IOStream.create(input, { bufferSize: options.bufferSize });
                formatContext.pb = ioContext;
                formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
                const ret = formatContext.openInputSync('', inputFormat, optionsDict);
                FFmpegError.throwIfError(ret, 'Failed to open input from Readable stream');
            }
            else if (typeof input === 'object' && 'read' in input) {
                // Custom I/O with callbacks - format is required
                if (!options.format) {
                    throw new Error('Format must be specified for custom I/O');
                }
                // Allocate context first for custom I/O
                formatContext.allocContext();
                // Setup custom I/O with callbacks (read required, seek optional)
                ioContext = new IOContext();
                ioContext.allocContextWithCallbacks(options.bufferSize ?? IO_BUFFER_SIZE, 0, input.read, null, input.seek);
                formatContext.pb = ioContext;
                formatContext.setFlags(AVFMT_FLAG_CUSTOM_IO);
                const ret = formatContext.openInputSync('', inputFormat, optionsDict);
                FFmpegError.throwIfError(ret, 'Failed to open input from custom I/O');
            }
            else {
                throw new TypeError('Invalid input type. Expected file path, URL, Buffer, IOContext, Readable, or IOInputCallbacks');
            }
            // Find stream information (optional probe; skipped for e.g. SDP inputs)
            if (!options.skipStreamInfo) {
                const ret = formatContext.findStreamInfoSync(null);
                FFmpegError.throwIfError(ret, 'Failed to find stream info');
            }
            // Determine buffer size
            let bufferSize = options.bufferSize ?? IO_BUFFER_SIZE;
            if (!ioContext && formatContext.iformat && formatContext.pb) {
                // Check if this is a streaming input (like RTSP, HTTP, etc.)
                const isStreaming = formatContext.pb.seekable === 0;
                if (isStreaming) {
                    bufferSize *= 2; // double buffer size for streaming inputs
                }
            }
            // Apply defaults to options
            const fullOptions = {
                bufferSize,
                format: options.format ?? '',
                skipStreamInfo: options.skipStreamInfo ?? false,
                startWithKeyframe: options.startWithKeyframe ?? false,
                dtsDeltaThreshold: options.dtsDeltaThreshold ?? DELTA_THRESHOLD,
                dtsErrorThreshold: options.dtsErrorThreshold ?? DTS_ERROR_THRESHOLD,
                copyTs: options.copyTs ?? false,
                options: options.options ?? {},
            };
            const demuxer = new Demuxer(formatContext, fullOptions, ioContext);
            if (options.signal) {
                // Fail fast if already aborted, before handing the demuxer out
                options.signal.throwIfAborted();
                demuxer.signal = options.signal;
            }
            return demuxer;
        }
        catch (error) {
            // Clean up only on error (on success ownership moves to the Demuxer)
            if (ioContext) {
                // Clear the pb reference first
                formatContext.pb = null;
                // Free the IOContext (for both custom I/O and buffer-based I/O)
                ioContext.freeContext();
            }
            // Clean up FormatContext
            formatContext.closeInputSync();
            throw error;
        }
        finally {
            // Clean up options dictionary (FFmpeg copies what it needs on open)
            if (optionsDict) {
                optionsDict.free();
            }
        }
    }
642
+ /**
643
+ * Open RTP/SRTP input stream via localhost UDP.
644
+ *
645
+ * Creates a Demuxer from SDP string received via UDP socket.
646
+ * Opens UDP socket and configures FFmpeg to receive and parse RTP packets.
647
+ *
648
+ * @param sdpContent - SDP content string describing the RTP stream
649
+ *
650
+ * @throws {Error} If SDP parsing or socket setup fails
651
+ *
652
+ * @throws {FFmpegError} If FFmpeg operations fail
653
+ *
654
+ * @returns Promise with Demuxer, sendPacket function and cleanup
655
+ *
656
+ * @example
657
+ * ```typescript
658
+ * import { Demuxer, StreamingUtils } from 'node-av/api';
659
+ * import { AV_CODEC_ID_OPUS } from 'node-av/constants';
660
+ *
661
+ * // Generate SDP for SRTP encrypted Opus
662
+ * const sdp = StreamingUtils.createRTPInputSDP([{
663
+ * port: 5004,
664
+ * codecId: AV_CODEC_ID_OPUS,
665
+ * payloadType: 111,
666
+ * clockRate: 16000,
667
+ * channels: 1,
668
+ * srtp: { key: srtpKey, salt: srtpSalt }
669
+ * }]);
670
+ *
671
+ * // Open RTP input
672
+ * const { input, sendPacket, close } = await Demuxer.openSDP(sdp);
673
+ *
674
+ * // Route encrypted RTP packets from network
675
+ * socket.on('message', (msg) => sendPacket(msg));
676
+ *
677
+ * // Decode audio
678
+ * const decoder = await Decoder.create(input.audio()!);
679
+ * for await (const packet of input.packets()) {
680
+ * const frame = await decoder.decode(packet);
681
+ * // Process frame...
682
+ * }
683
+ *
684
+ * // Cleanup
685
+ * await close();
686
+ * ```
687
+ *
688
+ * @see {@link StreamingUtils.createInputSDP} to generate SDP content.
689
+ */
690
    static async openSDP(sdpContent) {
        // Extract all ports from SDP (supports multi-stream: video + audio)
        const ports = StreamingUtils.extractPortsFromSDP(sdpContent);
        if (ports.length === 0) {
            throw new Error('Failed to extract any ports from SDP content');
        }
        // Convert SDP to buffer for custom I/O: FFmpeg reads the SDP text
        // itself through these callbacks instead of from a file
        const sdpBuffer = Buffer.from(sdpContent);
        let position = 0;
        // Create custom I/O callbacks for SDP content
        const callbacks = {
            read: (size) => {
                if (position >= sdpBuffer.length) {
                    return null; // EOF
                }
                // subarray shares memory with sdpBuffer; no copy is made
                const chunk = sdpBuffer.subarray(position, Math.min(position + size, sdpBuffer.length));
                position += chunk.length;
                return chunk;
            },
            seek: (offset, whence) => {
                // offset may arrive as bigint from native code; normalize it
                const offsetNum = Number(offset);
                if (whence === AVSEEK_SET) {
                    position = offsetNum;
                }
                else if (whence === AVSEEK_CUR) {
                    position += offsetNum;
                }
                else if (whence === AVSEEK_END) {
                    position = sdpBuffer.length + offsetNum;
                }
                // Unknown whence values leave position unchanged and report it
                return position;
            },
        };
        // Create UDP socket for sending packets to FFmpeg (which listens on
        // the ports declared in the SDP)
        const udpSocket = createSocket('udp4');
        try {
            // Open Demuxer with SDP format using custom I/O
            const input = await Demuxer.open(callbacks, {
                format: 'sdp',
                skipStreamInfo: true,
                options: {
                    protocol_whitelist: 'pipe,udp,rtp,file,crypto',
                    listen_timeout: -1,
                },
            });
            // Forward one RTP packet to the stream's localhost port.
            // Fire-and-forget: UDP send errors are not surfaced here.
            const sendPacket = (rtpPacket, streamIndex = 0) => {
                const port = ports[streamIndex];
                if (!port) {
                    throw new Error(`No port found for stream index ${streamIndex}. Available streams: ${ports.length}`);
                }
                udpSocket.send(rtpPacket, port, '127.0.0.1');
            };
            // Both cleanup variants close the demuxer first, then the socket
            const close = async () => {
                await input.close();
                udpSocket.close();
            };
            const closeSync = () => {
                input.closeSync();
                udpSocket.close();
            };
            return { input, sendPacket, close, closeSync };
        }
        catch (error) {
            // Cleanup on error: only the socket exists at this point
            udpSocket.close();
            throw error;
        }
    }
758
+ /**
759
+ * Open RTP/SRTP input stream via localhost UDP synchronously.
760
+ * Synchronous version of openSDP.
761
+ *
762
+ * Creates a Demuxer from SDP string received via UDP socket.
763
+ * Opens UDP socket and configures FFmpeg to receive and parse RTP packets.
764
+ *
765
+ * @param sdpContent - SDP content string describing the RTP stream
766
+ *
767
+ * @throws {Error} If SDP parsing or socket setup fails
768
+ *
769
+ * @throws {FFmpegError} If FFmpeg operations fail
770
+ *
771
+ * @returns Object with Demuxer, sendPacket function and cleanup
772
+ *
773
+ * @example
774
+ * ```typescript
775
+ * import { Demuxer, StreamingUtils } from 'node-av/api';
776
+ * import { AV_CODEC_ID_OPUS } from 'node-av/constants';
777
+ *
778
+ * // Generate SDP for SRTP encrypted Opus
779
+ * const sdp = StreamingUtils.createRTPInputSDP([{
780
+ * port: 5004,
781
+ * codecId: AV_CODEC_ID_OPUS,
782
+ * payloadType: 111,
783
+ * clockRate: 16000,
784
+ * channels: 1,
785
+ * srtp: { key: srtpKey, salt: srtpSalt }
786
+ * }]);
787
+ *
788
+ * // Open RTP input
789
+ * const { input, sendPacket, closeSync } = Demuxer.openSDPSync(sdp);
790
+ *
791
+ * // Route encrypted RTP packets from network
792
+ * socket.on('message', (msg) => sendPacket(msg));
793
+ *
794
+ * // Decode audio
795
+ * const decoder = await Decoder.create(input.audio()!);
796
+ * for await (const packet of input.packets()) {
797
+ * const frame = await decoder.decode(packet);
798
+ * // Process frame...
799
+ * }
800
+ *
801
+ * // Cleanup synchronously
802
+ * closeSync();
803
+ * ```
804
+ *
805
+ * @see {@link StreamingUtils.createInputSDP} to generate SDP content.
806
+ * @see {@link openSDP} For async version
807
+ */
808
+ static openSDPSync(sdpContent) {
809
+ // Extract all ports from SDP (supports multi-stream: video + audio)
810
+ const ports = StreamingUtils.extractPortsFromSDP(sdpContent);
811
+ if (ports.length === 0) {
812
+ throw new Error('Failed to extract any ports from SDP content');
813
+ }
814
+ // Convert SDP to buffer for custom I/O
815
+ const sdpBuffer = Buffer.from(sdpContent);
816
+ let position = 0;
817
+ // Create custom I/O callbacks for SDP content
818
+ const callbacks = {
819
+ read: (size) => {
820
+ if (position >= sdpBuffer.length) {
821
+ return null; // EOF
822
+ }
823
+ const chunk = sdpBuffer.subarray(position, Math.min(position + size, sdpBuffer.length));
824
+ position += chunk.length;
825
+ return chunk;
826
+ },
827
+ seek: (offset, whence) => {
828
+ const offsetNum = Number(offset);
829
+ if (whence === AVSEEK_SET) {
830
+ position = offsetNum;
831
+ }
832
+ else if (whence === AVSEEK_CUR) {
833
+ position += offsetNum;
834
+ }
835
+ else if (whence === AVSEEK_END) {
836
+ position = sdpBuffer.length + offsetNum;
837
+ }
838
+ return position;
839
+ },
840
+ };
841
+ // Create UDP socket for sending packets to FFmpeg
842
+ const udpSocket = createSocket('udp4');
843
+ try {
844
+ // Open Demuxer with SDP format using custom I/O
845
+ const input = Demuxer.openSync(callbacks, {
846
+ format: 'sdp',
847
+ skipStreamInfo: true,
848
+ options: {
849
+ protocol_whitelist: 'pipe,udp,rtp,file,crypto',
850
+ listen_timeout: -1,
851
+ },
852
+ });
853
+ const sendPacket = (rtpPacket, streamIndex = 0) => {
854
+ const port = ports[streamIndex];
855
+ if (!port) {
856
+ throw new Error(`No port found for stream index ${streamIndex}. Available streams: ${ports.length}`);
857
+ }
858
+ udpSocket.send(rtpPacket, port, '127.0.0.1');
859
+ };
860
+ const close = async () => {
861
+ await input.close();
862
+ udpSocket.close();
863
+ };
864
+ const closeSync = () => {
865
+ input.closeSync();
866
+ udpSocket.close();
867
+ };
868
+ return { input, sendPacket, close, closeSync };
869
+ }
870
+ catch (error) {
871
+ // Cleanup on error
872
+ udpSocket.close();
873
+ throw error;
874
+ }
875
+ }
876
+ /**
877
+ * Check if input is open.
878
+ *
879
+ * @example
880
+ * ```typescript
881
+ * if (!input.isInputOpen) {
882
+ * console.log('Input is not open');
883
+ * }
884
+ * ```
885
+ */
886
+ get isInputOpen() {
887
+ return !this.isClosed;
888
+ }
889
+ /**
890
+ * Get all streams in the media.
891
+ *
892
+ * @example
893
+ * ```typescript
894
+ * for (const stream of input.streams) {
895
+ * console.log(`Stream ${stream.index}: ${stream.codecpar.codecType}`);
896
+ * }
897
+ * ```
898
+ */
899
+ get streams() {
900
+ return this._streams;
901
+ }
902
+ /**
903
+ * Get media duration in seconds.
904
+ *
905
+ * Returns 0 if duration is unknown or not available or input is closed.
906
+ *
907
+ * @example
908
+ * ```typescript
909
+ * console.log(`Duration: ${input.duration} seconds`);
910
+ * ```
911
+ */
912
+ get duration() {
913
+ if (this.isClosed) {
914
+ return 0;
915
+ }
916
+ const duration = this.formatContext.duration;
917
+ if (!duration || duration <= 0) {
918
+ return 0;
919
+ }
920
+ // Convert from AV_TIME_BASE (microseconds) to seconds
921
+ return Number(duration) / 1000000;
922
+ }
923
+ /**
924
+ * Get media start time in seconds.
925
+ *
926
+ * For device inputs (e.g., avfoundation), this reflects the system uptime
927
+ * at capture start. Pass this value to `Muxer.open()` via the `startTime`
928
+ * option to produce correct output timestamps.
929
+ *
930
+ * Returns 0 if start time is unknown or not available or input is closed.
931
+ *
932
+ * @example
933
+ * ```typescript
934
+ * await using input = await Demuxer.open(source);
935
+ * await using output = await Muxer.open('output.mp4', { startTime: input.startTime });
936
+ * ```
937
+ */
938
+ get startTime() {
939
+ if (this.isClosed) {
940
+ return 0;
941
+ }
942
+ const startTime = this.formatContext.startTime;
943
+ if (!startTime || startTime <= 0n) {
944
+ return 0;
945
+ }
946
+ // Convert from AV_TIME_BASE (microseconds) to seconds
947
+ return Number(startTime) / 1000000;
948
+ }
949
+ /**
950
+ * Get media bitrate in kilobits per second.
951
+ *
952
+ * Returns 0 if bitrate is unknown or not available or input is closed.
953
+ *
954
+ * @example
955
+ * ```typescript
956
+ * console.log(`Bitrate: ${input.bitRate} kbps`);
957
+ * ```
958
+ */
959
+ get bitRate() {
960
+ if (this.isClosed) {
961
+ return 0;
962
+ }
963
+ const bitrate = this.formatContext.bitRate;
964
+ if (!bitrate || bitrate <= 0) {
965
+ return 0;
966
+ }
967
+ // Convert from bits per second to kilobits per second
968
+ return Number(bitrate) / 1000;
969
+ }
970
+ /**
971
+ * Get media metadata.
972
+ *
973
+ * Returns all metadata tags as key-value pairs.
974
+ *
975
+ * @example
976
+ * ```typescript
977
+ * const metadata = input.metadata;
978
+ * console.log(`Title: ${metadata.title}`);
979
+ * console.log(`Artist: ${metadata.artist}`);
980
+ * ```
981
+ */
982
+ get metadata() {
983
+ if (this.isClosed) {
984
+ return {};
985
+ }
986
+ return this.formatContext.metadata?.getAll() ?? {};
987
+ }
988
+ /**
989
+ * Get format name.
990
+ *
991
+ * Returns 'unknown' if input is closed or format is not available.
992
+ *
993
+ * @example
994
+ * ```typescript
995
+ * console.log(`Format: ${input.formatName}`); // "mov,mp4,m4a,3gp,3g2,mj2"
996
+ * ```
997
+ */
998
+ get formatName() {
999
+ if (this.isClosed) {
1000
+ return 'unknown';
1001
+ }
1002
+ return this.formatContext.iformat?.name ?? 'unknown';
1003
+ }
1004
+ /**
1005
+ * Get format long name.
1006
+ *
1007
+ * Returns 'Unknown Format' if input is closed or format is not available.
1008
+ *
1009
+ * @example
1010
+ * ```typescript
1011
+ * console.log(`Format: ${input.formatLongName}`); // "QuickTime / MOV"
1012
+ * ```
1013
+ */
1014
+ get formatLongName() {
1015
+ if (this.isClosed) {
1016
+ return 'Unknown Format';
1017
+ }
1018
+ return this.formatContext.iformat?.longName ?? 'Unknown Format';
1019
+ }
1020
+ /**
1021
+ * Get MIME type of the input format.
1022
+ *
1023
+ * Returns null if input is closed or format is not available.
1024
+ *
1025
+ * @example
1026
+ * ```typescript
1027
+ * console.log(`MIME Type: ${input.mimeType}`); // "video/mp4"
1028
+ * ```
1029
+ */
1030
+ get mimeType() {
1031
+ if (this.isClosed) {
1032
+ return null;
1033
+ }
1034
+ return this.formatContext.iformat?.mimeType ?? null;
1035
+ }
1036
+ /**
1037
+ * Get input stream by index.
1038
+ *
1039
+ * Returns the stream at the specified index.
1040
+ *
1041
+ * @param index - Stream index
1042
+ *
1043
+ * @returns Stream or undefined if index is invalid
1044
+ *
1045
+ * @example
1046
+ * ```typescript
1047
+ * const input = await Demuxer.open('input.mp4');
1048
+ *
1049
+ * // Get the input stream to inspect codec parameters
1050
+ * const stream = input.getStream(1); // Get stream at index 1
1051
+ * if (stream) {
1052
+ * console.log(`Input codec: ${stream.codecpar.codecId}`);
1053
+ * }
1054
+ * ```
1055
+ *
1056
+ * @see {@link video} For getting video streams
1057
+ * @see {@link audio} For getting audio streams
1058
+ */
1059
+ getStream(index) {
1060
+ const streams = this.formatContext.streams;
1061
+ if (!streams || index < 0 || index >= streams.length) {
1062
+ return undefined;
1063
+ }
1064
+ return streams[index];
1065
+ }
1066
+ /**
1067
+ * Get video stream by index.
1068
+ *
1069
+ * Returns the nth video stream (0-based index).
1070
+ * Returns undefined if stream doesn't exist.
1071
+ *
1072
+ * @param index - Video stream index (default: 0)
1073
+ *
1074
+ * @returns Video stream or undefined
1075
+ *
1076
+ * @example
1077
+ * ```typescript
1078
+ * const videoStream = input.video();
1079
+ * if (videoStream) {
1080
+ * console.log(`Video: ${videoStream.codecpar.width}x${videoStream.codecpar.height}`);
1081
+ * }
1082
+ * ```
1083
+ *
1084
+ * @example
1085
+ * ```typescript
1086
+ * // Get second video stream
1087
+ * const secondVideo = input.video(1);
1088
+ * ```
1089
+ *
1090
+ * @see {@link audio} For audio streams
1091
+ * @see {@link findBestStream} For automatic selection
1092
+ */
1093
+ video(index = 0) {
1094
+ const streams = this._streams.filter((s) => s.codecpar.codecType === AVMEDIA_TYPE_VIDEO);
1095
+ return streams[index];
1096
+ }
1097
+ /**
1098
+ * Get audio stream by index.
1099
+ *
1100
+ * Returns the nth audio stream (0-based index).
1101
+ * Returns undefined if stream doesn't exist.
1102
+ *
1103
+ * @param index - Audio stream index (default: 0)
1104
+ *
1105
+ * @returns Audio stream or undefined
1106
+ *
1107
+ * @example
1108
+ * ```typescript
1109
+ * const audioStream = input.audio();
1110
+ * if (audioStream) {
1111
+ * console.log(`Audio: ${audioStream.codecpar.sampleRate}Hz`);
1112
+ * }
1113
+ * ```
1114
+ *
1115
+ * @example
1116
+ * ```typescript
1117
+ * // Get second audio stream
1118
+ * const secondAudio = input.audio(1);
1119
+ * ```
1120
+ *
1121
+ * @see {@link video} For video streams
1122
+ * @see {@link findBestStream} For automatic selection
1123
+ */
1124
+ audio(index = 0) {
1125
+ const streams = this._streams.filter((s) => s.codecpar.codecType === AVMEDIA_TYPE_AUDIO);
1126
+ return streams[index];
1127
+ }
1128
+ /**
1129
+ * Get input format details.
1130
+ *
1131
+ * Returns null if input is closed or format is not available.
1132
+ *
1133
+ * @returns Input format or null
1134
+ *
1135
+ * @example
1136
+ * ```typescript
1137
+ * const inputFormat = input.inputFormat;
1138
+ * if (inputFormat) {
1139
+ * console.log(`Input Format: ${inputFormat.name}`);
1140
+ * }
1141
+ * ```
1142
+ */
1143
+ inputFormat() {
1144
+ return this.formatContext.iformat;
1145
+ }
1146
+ /**
1147
+ * Find the best stream of a given type.
1148
+ *
1149
+ * Uses FFmpeg's stream selection algorithm.
1150
+ * Considers codec support, default flags, and quality.
1151
+ *
1152
+ * Direct mapping to av_find_best_stream().
1153
+ *
1154
+ * @param type - Media type to find
1155
+ *
1156
+ * @returns Best stream or undefined if not found or input is closed
1157
+ *
1158
+ * @example
1159
+ * ```typescript
1160
+ * import { AVMEDIA_TYPE_VIDEO } from 'node-av/constants';
1161
+ *
1162
+ * const bestVideo = input.findBestStream(AVMEDIA_TYPE_VIDEO);
1163
+ * if (bestVideo) {
1164
+ * const decoder = await Decoder.create(bestVideo);
1165
+ * }
1166
+ * ```
1167
+ *
1168
+ * @see {@link video} For direct video stream access
1169
+ * @see {@link audio} For direct audio stream access
1170
+ */
1171
+ findBestStream(type) {
1172
+ if (this.isClosed) {
1173
+ return undefined;
1174
+ }
1175
+ const bestStreamIndex = this.formatContext.findBestStream(type);
1176
+ return this._streams.find((s) => s.index === bestStreamIndex);
1177
+ }
1178
+ /**
1179
+ * Read packets from media as async generator.
1180
+ *
1181
+ * Yields demuxed packets for processing.
1182
+ * Automatically handles packet memory management.
1183
+ * Optionally filters packets by stream index.
1184
+ *
1185
+ * **Supports parallel generators**: Multiple `packets()` iterators can run concurrently.
1186
+ * When multiple generators are active, an internal demux thread automatically handles
1187
+ * packet distribution to avoid race conditions.
1188
+ *
1189
+ * Direct mapping to av_read_frame().
1190
+ *
1191
+ * @param index - Optional stream index to filter
1192
+ *
1193
+ * @yields {Packet} Demuxed packets (must be freed by caller)
1194
+ *
1195
+ * @throws {Error} If packet cloning fails
1196
+ *
1197
+ * @example
1198
+ * ```typescript
1199
+ * // Read all packets
1200
+ * for await (const packet of input.packets()) {
1201
+ * console.log(`Packet: stream=${packet.streamIndex}, pts=${packet.pts}`);
1202
+ * packet.free();
1203
+ * }
1204
+ * ```
1205
+ *
1206
+ * @example
1207
+ * ```typescript
1208
+ * // Read only video packets
1209
+ * const videoStream = input.video();
1210
+ * for await (const packet of input.packets(videoStream.index)) {
1211
+ * // Process video packet
1212
+ * packet.free();
1213
+ * }
1214
+ * ```
1215
+ *
1216
+ * @example
1217
+ * ```typescript
1218
+ * // Parallel processing of video and audio streams
1219
+ * const videoGen = input.packets(videoStream.index);
1220
+ * const audioGen = input.packets(audioStream.index);
1221
+ *
1222
+ * await Promise.all([
1223
+ * (async () => {
1224
+ * for await (const packet of videoGen) {
1225
+ * // Process video
1226
+ * packet.free();
1227
+ * }
1228
+ * })(),
1229
+ * (async () => {
1230
+ * for await (const packet of audioGen) {
1231
+ * // Process audio
1232
+ * packet.free();
1233
+ * }
1234
+ * })()
1235
+ * ]);
1236
+ * ```
1237
+ *
1238
+ * @see {@link Decoder.frames} For decoding packets
1239
+ */
1240
    async *packets(index) {
        // Register this generator; the count drives demux-thread lifetime
        this.activeGenerators++;
        const queueKey = index ?? 'all';
        // Initialize queue for this generator (one queue per stream filter)
        if (!this.packetQueues.has(queueKey)) {
            this.packetQueues.set(queueKey, []);
        }
        // Always start demux thread (handles single and multiple generators);
        // it performs av_read_frame and distributes packets into the queues
        this.startDemuxThread();
        let aborted = false;
        try {
            // When startWithKeyframe is set, video packets are dropped until
            // the first keyframe; otherwise everything passes through
            let hasSeenKeyframe = !this.options.startWithKeyframe;
            // Read from queue (demux thread is handling av_read_frame)
            const queue = this.packetQueues.get(queueKey);
            while (!this.isClosed) {
                if (this.signal?.aborted) {
                    aborted = true;
                    break;
                }
                // Try to get packet from queue
                let packet = queue.shift();
                // If queue is empty, wait for next packet
                if (!packet) {
                    // Check for EOF first
                    if (this.demuxEof) {
                        break; // End of stream
                    }
                    // Create promise and register resolver; the demux thread
                    // (or the abort handler) resolves it to wake us up
                    const { promise, resolve } = Promise.withResolvers();
                    this.queueResolvers.set(queueKey, resolve);
                    // Wait for demux thread to add packet
                    await promise;
                    // Check for abort after wakeup
                    if (this.signal?.aborted) {
                        aborted = true;
                        break;
                    }
                    // Check again after wakeup
                    if (this.demuxEof) {
                        break;
                    }
                    packet = queue.shift();
                    if (!packet) {
                        // Spurious wakeup: loop and wait again
                        continue;
                    }
                }
                // Apply keyframe filtering if needed (video streams only;
                // audio and other streams always pass through)
                if (!hasSeenKeyframe) {
                    const stream = this._streams[packet.streamIndex];
                    const isVideoStream = stream?.codecpar.codecType === AVMEDIA_TYPE_VIDEO;
                    if (isVideoStream && packet.isKeyframe) {
                        hasSeenKeyframe = true;
                    }
                    else if (isVideoStream && !packet.isKeyframe) {
                        // Dropped packets are freed here; yielded ones are the
                        // caller's responsibility to free
                        packet.free();
                        continue;
                    }
                }
                yield packet;
            }
        }
        finally {
            // Unregister this generator
            this.activeGenerators--;
            // Stop demux thread if no more generators
            if (this.activeGenerators === 0) {
                await this.stopDemuxThread();
            }
            if (!aborted) {
                yield null; // Signal EOF
            }
        }
        // Throw after generator cleanup — only reachable when aborted
        // (when not aborted, yield null in finally already terminated the generator)
        this.signal?.throwIfAborted();
    }
1317
+ /**
1318
+ * Read packets from media as generator synchronously.
1319
+ * Synchronous version of packets.
1320
+ *
1321
+ * Yields demuxed packets for processing.
1322
+ * Automatically handles packet memory management.
1323
+ * Optionally filters packets by stream index.
1324
+ *
1325
+ * Direct mapping to av_read_frame().
1326
+ *
1327
+ * @param index - Optional stream index to filter
1328
+ *
1329
+ * @yields {Packet} Demuxed packets (must be freed by caller)
1330
+ *
1331
+ * @throws {Error} If packet cloning fails
1332
+ *
1333
+ * @example
1334
+ * ```typescript
1335
+ * // Read all packets
1336
+ * for (const packet of input.packetsSync()) {
1337
+ * console.log(`Packet: stream=${packet.streamIndex}, pts=${packet.pts}`);
1338
+ * packet.free();
1339
+ * }
1340
+ * ```
1341
+ *
1342
+ * @example
1343
+ * ```typescript
1344
+ * // Read only video packets
1345
+ * const videoStream = input.video();
1346
+ * for (const packet of input.packetsSync(videoStream.index)) {
1347
+ * // Process video packet
1348
+ * packet.free();
1349
+ * }
1350
+ * ```
1351
+ *
1352
+ * @see {@link packets} For async version
1353
+ */
1354
    *packetsSync(index) {
        // Disposable-resource bookkeeping emitted by the TypeScript compiler
        // for `using`: guarantees the packet is disposed on every exit path
        const env_1 = { stack: [], error: void 0, hasError: false };
        try {
            // One reusable packet: unref'd after each frame, freed at the end
            const packet = __addDisposableResource(env_1, new Packet(), false);
            packet.alloc();
            // When startWithKeyframe is set, video packets are dropped until
            // the first keyframe; otherwise everything passes through
            let hasSeenKeyframe = !this.options.startWithKeyframe;
            while (!this.isClosed) {
                const ret = this.formatContext.readFrameSync(packet);
                if (ret < 0) {
                    // Any negative return (EOF or error) ends the stream
                    break;
                }
                // Get stream for timestamp processing
                const stream = this._streams[packet.streamIndex];
                if (stream) {
                    // Set packet timebase to stream timebase
                    // This must be done BEFORE any timestamp processing
                    packet.timeBase = stream.timeBase;
                    // Apply timestamp processing
                    // 1. PTS wrap-around correction
                    this.ptsWrapAroundCorrection(packet, stream);
                    // 2. Timestamp discontinuity processing
                    this.timestampDiscontinuityProcess(packet, stream);
                    // 3. DTS prediction/update
                    this.dtsPredict(packet, stream);
                }
                if (index === undefined || packet.streamIndex === index) {
                    // If startWithKeyframe is enabled, skip packets until we see a keyframe
                    // Only apply to video streams - audio packets should always pass through
                    if (!hasSeenKeyframe) {
                        const stream = this._streams[packet.streamIndex];
                        const isVideoStream = stream?.codecpar.codecType === AVMEDIA_TYPE_VIDEO;
                        if (isVideoStream && packet.isKeyframe) {
                            hasSeenKeyframe = true;
                        }
                        else if (isVideoStream && !packet.isKeyframe) {
                            // Skip video P-frames until first keyframe
                            packet.unref();
                            continue;
                        }
                        // Non-video streams (audio, etc.) always pass through
                    }
                    // Clone the packet for the user
                    // This creates a new Packet object that shares the same data buffer
                    // through reference counting. The data won't be freed until both
                    // the original and the clone are unreferenced.
                    const cloned = packet.clone();
                    if (!cloned) {
                        throw new Error('Failed to clone packet (out of memory)');
                    }
                    yield cloned;
                }
                // Unreference the original packet's data buffer
                // This allows us to reuse the packet object for the next readFrame()
                // The data itself is still alive because the clone has a reference
                packet.unref();
            }
            // Signal EOF
            yield null;
        }
        catch (e_1) {
            // Record the error so __disposeResources can rethrow it after cleanup
            env_1.error = e_1;
            env_1.hasError = true;
        }
        finally {
            __disposeResources(env_1);
        }
    }
1421
+ /**
1422
+ * Seek to timestamp in media.
1423
+ *
1424
+ * Seeks to the specified position in seconds.
1425
+ * Can seek in specific stream or globally.
1426
+ *
1427
+ * Direct mapping to av_seek_frame().
1428
+ *
1429
+ * @param timestamp - Target position in seconds
1430
+ *
1431
+ * @param streamIndex - Stream index or -1 for global (default: -1)
1432
+ *
1433
+ * @param flags - Seek flags (default: AVFLAG_NONE)
1434
+ *
1435
+ * @returns 0 on success, negative on error
1436
+ *
1437
+ * @throws {Error} If input is closed
1438
+ *
1439
+ * @example
1440
+ * ```typescript
1441
+ * // Seek to 30 seconds
1442
+ * const ret = await input.seek(30);
1443
+ * FFmpegError.throwIfError(ret, 'seek failed');
1444
+ * ```
1445
+ *
1446
+ * @example
1447
+ * ```typescript
1448
+ * import { AVSEEK_FLAG_BACKWARD } from 'node-av/constants';
1449
+ *
1450
+ * // Seek to keyframe before 60 seconds
1451
+ * await input.seek(60, -1, AVSEEK_FLAG_BACKWARD);
1452
+ * ```
1453
+ *
1454
+ * @see {@link AVSeekFlag} For seek flags
1455
+ */
1456
+ async seek(timestamp, streamIndex = -1, flags = AVFLAG_NONE) {
1457
+ this.signal?.throwIfAborted();
1458
+ if (this.isClosed) {
1459
+ throw new Error('Cannot seek on closed input');
1460
+ }
1461
+ // Convert seconds to AV_TIME_BASE
1462
+ const ts = BigInt(Math.floor(timestamp * 1000000));
1463
+ return this.formatContext.seekFrame(streamIndex, ts, flags);
1464
+ }
1465
+ /**
1466
+ * Seek to timestamp in media synchronously.
1467
+ * Synchronous version of seek.
1468
+ *
1469
+ * Seeks to the specified position in seconds.
1470
+ * Can seek in specific stream or globally.
1471
+ *
1472
+ * Direct mapping to av_seek_frame().
1473
+ *
1474
+ * @param timestamp - Target position in seconds
1475
+ *
1476
+ * @param streamIndex - Stream index or -1 for global (default: -1)
1477
+ *
1478
+ * @param flags - Seek flags (default: AVFLAG_NONE)
1479
+ *
1480
+ * @returns 0 on success, negative on error
1481
+ *
1482
+ * @throws {Error} If input is closed
1483
+ *
1484
+ * @example
1485
+ * ```typescript
1486
+ * // Seek to 30 seconds
1487
+ * const ret = input.seekSync(30);
1488
+ * FFmpegError.throwIfError(ret, 'seek failed');
1489
+ * ```
1490
+ *
1491
+ * @example
1492
+ * ```typescript
1493
+ * import { AVSEEK_FLAG_BACKWARD } from 'node-av/constants';
1494
+ *
1495
+ * // Seek to keyframe before 60 seconds
1496
+ * input.seekSync(60, -1, AVSEEK_FLAG_BACKWARD);
1497
+ * ```
1498
+ *
1499
+ * @see {@link seek} For async version
1500
+ */
1501
+ seekSync(timestamp, streamIndex = -1, flags = AVFLAG_NONE) {
1502
+ if (this.isClosed) {
1503
+ throw new Error('Cannot seek on closed input');
1504
+ }
1505
+ // Convert seconds to AV_TIME_BASE
1506
+ const ts = BigInt(Math.floor(timestamp * 1000000));
1507
+ return this.formatContext.seekFrameSync(streamIndex, ts, flags);
1508
+ }
1509
    /**
     * Start the internal demux thread for handling multiple parallel packet generators.
     * This thread reads packets from the format context and distributes them to queues.
     *
     * Idempotent: returns immediately if the loop is already running or a previous
     * loop promise is still pending. The "thread" is a detached async IIFE whose
     * promise is stored in this.demuxThread so close()/stopDemuxThread() can join it.
     *
     * @internal
     */
    startDemuxThread() {
        if (this.demuxThreadActive || this.demuxThread) {
            return; // Already running
        }
        this.demuxThreadActive = true;
        // Install the abort handler once; signalCleanup doubles as the "installed" flag
        // and is invoked by close()/closeSync() to detach the listener.
        if (this.signal && !this.signalCleanup) {
            const handler = () => {
                // On abort: stop the read loop and wake every generator parked on an
                // empty queue so it can observe the stop and exit.
                this.demuxThreadActive = false;
                for (const resolve of this.queueResolvers.values()) {
                    resolve();
                }
                this.queueResolvers.clear();
            };
            this.signal.addEventListener('abort', handler, { once: true });
            this.signalCleanup = () => this.signal?.removeEventListener('abort', handler);
        }
        this.demuxThread = (async () => {
            // env_2/__addDisposableResource/__disposeResources are the TypeScript
            // `using` transpilation helpers: the Packet below is disposed in the
            // finally block no matter how the loop exits.
            const env_2 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_2, new Packet(), false);
                packet.alloc();
                while (this.demuxThreadActive && !this.isClosed) {
                    // Check if all queues are full - if so, wait a bit
                    // (backpressure: yield to the event loop so consumers can drain).
                    let allQueuesFull = true;
                    for (const queue of this.packetQueues.values()) {
                        if (queue.length < MAX_INPUT_QUEUE_SIZE) {
                            allQueuesFull = false;
                            break;
                        }
                    }
                    if (allQueuesFull) {
                        await new Promise(setImmediate);
                        continue;
                    }
                    // Read next packet
                    const ret = await this.formatContext.readFrame(packet);
                    // IMPORTANT: Check isClosed again after await - the demuxer may have been
                    // closed while we were waiting for readFrame(). If closed, the native
                    // AVStreams have been freed and accessing them would cause use-after-free!
                    if (this.isClosed) {
                        break;
                    }
                    if (ret < 0) {
                        // EAGAIN means no data available yet (common with live device capture)
                        // Retry after a short delay instead of treating as EOF
                        // Matches FFmpeg CLI behavior: av_usleep(10000) in ffmpeg_demux.c
                        if (ret === AVERROR_EAGAIN) {
                            await new Promise((resolve) => setTimeout(resolve, 10));
                            continue;
                        }
                        // Actual end of stream - notify all waiting consumers
                        this.demuxEof = true;
                        for (const resolve of this.queueResolvers.values()) {
                            resolve();
                        }
                        this.queueResolvers.clear();
                        break;
                    }
                    // Get stream for timestamp processing.
                    // Order matters: wrap-around correction, then discontinuity
                    // processing, then DTS prediction (mirrors this file's FFmpeg port).
                    const stream = this._streams[packet.streamIndex];
                    if (stream) {
                        packet.timeBase = stream.timeBase;
                        this.ptsWrapAroundCorrection(packet, stream);
                        this.timestampDiscontinuityProcess(packet, stream);
                        this.dtsPredict(packet, stream);
                    }
                    // Find which queues need this packet.
                    // NOTE(review): a queue at MAX_INPUT_QUEUE_SIZE is skipped here, so a
                    // full 'all' queue can miss packets that still go to a per-stream
                    // queue (and vice versa) — presumably intentional backpressure/drop
                    // behavior; confirm against consumer expectations.
                    const allQueue = this.packetQueues.get('all');
                    const streamQueue = this.packetQueues.get(packet.streamIndex);
                    const targetQueues = [];
                    if (allQueue && allQueue.length < MAX_INPUT_QUEUE_SIZE) {
                        targetQueues.push({ queue: allQueue, event: 'packet-all' });
                    }
                    // Only add stream queue if it's different from 'all' queue
                    if (streamQueue && streamQueue !== allQueue && streamQueue.length < MAX_INPUT_QUEUE_SIZE) {
                        targetQueues.push({ queue: streamQueue, event: `packet-${packet.streamIndex}` });
                    }
                    if (targetQueues.length === 0) {
                        // No queue needs this packet, skip it
                        packet.unref();
                        continue;
                    }
                    // Clone once, then share reference for additional queues
                    const firstClone = packet.clone();
                    if (!firstClone) {
                        throw new Error('Failed to clone packet in demux thread (out of memory)');
                    }
                    // Add to first queue and resolve waiting promise
                    // (resolver key is 'all' or the numeric stream index, matching
                    // the packetQueues keys above).
                    const firstKey = targetQueues[0].event.replace('packet-', '') === 'all' ? 'all' : packet.streamIndex;
                    targetQueues[0].queue.push(firstClone);
                    const firstResolver = this.queueResolvers.get(firstKey);
                    if (firstResolver) {
                        firstResolver();
                        this.queueResolvers.delete(firstKey);
                    }
                    // Additional queues get clones (shares data buffer via reference counting)
                    for (let i = 1; i < targetQueues.length; i++) {
                        const additionalClone = firstClone.clone();
                        if (!additionalClone) {
                            throw new Error('Failed to clone packet for additional queue (out of memory)');
                        }
                        const queueKey = targetQueues[i].event.replace('packet-', '') === 'all' ? 'all' : packet.streamIndex;
                        targetQueues[i].queue.push(additionalClone);
                        const resolver = this.queueResolvers.get(queueKey);
                        if (resolver) {
                            resolver();
                            this.queueResolvers.delete(queueKey);
                        }
                    }
                    // Release the read buffer; the clones keep the data alive.
                    packet.unref();
                }
                this.demuxThreadActive = false;
            }
            catch (e_2) {
                // Transpiled `using` error capture; rethrown by __disposeResources.
                env_2.error = e_2;
                env_2.hasError = true;
            }
            finally {
                __disposeResources(env_2);
            }
        })();
    }
1637
+ /**
1638
+ * Stop the internal demux thread.
1639
+ *
1640
+ * @internal
1641
+ */
1642
+ async stopDemuxThread() {
1643
+ this.demuxThreadActive = false;
1644
+ this.demuxEof = true;
1645
+ // Wake up any waiting generators
1646
+ for (const resolve of this.queueResolvers.values()) {
1647
+ resolve();
1648
+ }
1649
+ this.queueResolvers.clear();
1650
+ // Wait for demux thread with timeout to avoid hanging on blocked reads
1651
+ if (this.demuxThread) {
1652
+ const threadPromise = this.demuxThread;
1653
+ let timer;
1654
+ const timeoutPromise = new Promise((resolve) => {
1655
+ timer = setTimeout(resolve, 2000);
1656
+ timer.unref();
1657
+ });
1658
+ await Promise.race([threadPromise, timeoutPromise]);
1659
+ clearTimeout(timer);
1660
+ this.demuxThread = null;
1661
+ }
1662
+ // Clear all queues
1663
+ for (const queue of this.packetQueues.values()) {
1664
+ for (const packet of queue) {
1665
+ packet.free();
1666
+ }
1667
+ queue.length = 0;
1668
+ }
1669
+ this.packetQueues.clear();
1670
+ }
1671
+ /**
1672
+ * Get or create stream state for timestamp processing.
1673
+ *
1674
+ * @param streamIndex - Stream index
1675
+ *
1676
+ * @returns Stream state
1677
+ *
1678
+ * @internal
1679
+ */
1680
+ getStreamState(streamIndex) {
1681
+ let state = this.streamStates.get(streamIndex);
1682
+ if (!state) {
1683
+ state = {
1684
+ wrapCorrectionDone: false,
1685
+ sawFirstTs: false,
1686
+ firstDts: AV_NOPTS_VALUE,
1687
+ nextDts: AV_NOPTS_VALUE,
1688
+ dts: AV_NOPTS_VALUE,
1689
+ };
1690
+ this.streamStates.set(streamIndex, state);
1691
+ }
1692
+ return state;
1693
+ }
1694
+ /**
1695
+ * PTS Wrap-Around Correction.
1696
+ *
1697
+ * Based on FFmpeg's ts_fixup().
1698
+ *
1699
+ * Corrects timestamp wrap-around for streams with limited timestamp bits.
1700
+ * DVB streams typically use 31-bit timestamps that wrap around.
1701
+ * Without correction, timestamps become negative causing playback errors.
1702
+ *
1703
+ * Handles:
1704
+ * - Detects wrap-around based on pts_wrap_bits from stream
1705
+ * - Applies correction once per stream
1706
+ * - Corrects both PTS and DTS
1707
+ *
1708
+ * @param packet - Packet to correct
1709
+ *
1710
+ * @param stream - Stream metadata
1711
+ *
1712
+ * @internal
1713
+ */
1714
+ ptsWrapAroundCorrection(packet, stream) {
1715
+ const state = this.getStreamState(packet.streamIndex);
1716
+ // Already corrected or no wrap bits configured
1717
+ if (state.wrapCorrectionDone || stream.ptsWrapBits >= 64) {
1718
+ return;
1719
+ }
1720
+ const startTime = this.formatContext.startTime;
1721
+ if (startTime === AV_NOPTS_VALUE) {
1722
+ return;
1723
+ }
1724
+ const ptsWrapBits = stream.ptsWrapBits;
1725
+ // Rescale start_time to packet's timebase
1726
+ // Note: packet.timeBase was set to stream.timeBase in packets() generator
1727
+ const stime = avRescaleQ(startTime, AV_TIME_BASE_Q, packet.timeBase);
1728
+ const stime2 = stime + (1n << BigInt(ptsWrapBits));
1729
+ state.wrapCorrectionDone = true;
1730
+ const wrapThreshold = stime + (1n << BigInt(ptsWrapBits - 1));
1731
+ // Check DTS for wrap-around
1732
+ if (stime2 > stime && packet.dts !== AV_NOPTS_VALUE && packet.dts > wrapThreshold) {
1733
+ packet.dts -= 1n << BigInt(ptsWrapBits);
1734
+ state.wrapCorrectionDone = false; // May wrap again
1735
+ }
1736
+ // Check PTS for wrap-around
1737
+ if (stime2 > stime && packet.pts !== AV_NOPTS_VALUE && packet.pts > wrapThreshold) {
1738
+ packet.pts -= 1n << BigInt(ptsWrapBits);
1739
+ state.wrapCorrectionDone = false; // May wrap again
1740
+ }
1741
+ }
1742
+ /**
1743
+ * DTS Prediction and Update.
1744
+ *
1745
+ * Based on FFmpeg's ist_dts_update().
1746
+ *
1747
+ * Predicts next expected DTS for frame ordering validation and discontinuity detection.
1748
+ * Uses codec-specific logic:
1749
+ * - Audio: Based on sample_rate and frame_size
1750
+ * - Video: Based on framerate or duration
1751
+ *
1752
+ * Handles:
1753
+ * - First timestamp initialization
1754
+ * - Codec-specific duration calculation
1755
+ * - DTS sequence tracking
1756
+ *
1757
+ * @param packet - Packet to process
1758
+ *
1759
+ * @param stream - Stream metadata
1760
+ *
1761
+ * @internal
1762
+ */
1763
+ dtsPredict(packet, stream) {
1764
+ const state = this.getStreamState(packet.streamIndex);
1765
+ // Call native implementation with native objects
1766
+ const newState = nativeDtsPredict(packet, stream, {
1767
+ sawFirstTs: state.sawFirstTs,
1768
+ dts: state.dts,
1769
+ nextDts: state.nextDts,
1770
+ firstDts: state.firstDts,
1771
+ });
1772
+ // Update state with results
1773
+ state.sawFirstTs = newState.sawFirstTs;
1774
+ state.dts = newState.dts;
1775
+ state.nextDts = newState.nextDts;
1776
+ state.firstDts = newState.firstDts;
1777
+ }
1778
    /**
     * Timestamp Discontinuity Detection.
     *
     * Based on FFmpeg's ts_discontinuity_detect().
     *
     * Detects and corrects timestamp discontinuities in streams.
     * Handles two cases:
     * - Discontinuous formats (MPEG-TS): Apply offset correction
     * - Continuous formats (MP4): Mark timestamps as invalid
     *
     * Handles:
     * - Format-specific discontinuity handling (AVFMT_TS_DISCONT flag)
     * - PTS wrap-around detection for streams with limited timestamp bits
     * - Intra-stream discontinuity detection
     * - Inter-stream discontinuity detection
     * - Offset accumulation and application
     * - copyTs mode with selective correction
     *
     * Precondition: callers in this file (timestampDiscontinuityProcess) only
     * invoke this with packet.dts !== AV_NOPTS_VALUE.
     *
     * Side effects: may mutate packet.pts/packet.dts, accumulate into
     * this.tsOffsetDiscont, and always updates this.lastTs.
     *
     * @param packet - Packet to check for discontinuities
     *
     * @param stream - Stream metadata
     *
     * @internal
     */
    timestampDiscontinuityDetect(packet, stream) {
        const state = this.getStreamState(packet.streamIndex);
        const inputFormat = this.formatContext.iformat;
        // Check if format declares timestamp discontinuities
        const fmtIsDiscont = !!(inputFormat && inputFormat.flags & AVFMT_TS_DISCONT);
        // Disable correction when copyTs is enabled
        let disableDiscontinuityCorrection = this.options.copyTs;
        // Rescale packet DTS to AV_TIME_BASE for comparison
        const pktDts = avRescaleQRnd(packet.dts, packet.timeBase, AV_TIME_BASE_Q, (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        // PTS wrap-around detection
        // Only applies when copyTs is enabled and stream has limited timestamp bits
        if (this.options.copyTs && state.nextDts !== AV_NOPTS_VALUE && fmtIsDiscont && stream.ptsWrapBits < 60) {
            // Calculate wrapped DTS by adding 2^pts_wrap_bits to packet DTS
            const wrapDts = avRescaleQRnd(packet.dts + (1n << BigInt(stream.ptsWrapBits)), packet.timeBase, AV_TIME_BASE_Q, (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
            // If wrapped DTS is closer to predicted nextDts, enable correction
            // (deltas are BigInt absolute differences; "10x closer" heuristic below)
            const wrapDelta = wrapDts > state.nextDts ? wrapDts - state.nextDts : state.nextDts - wrapDts;
            const normalDelta = pktDts > state.nextDts ? pktDts - state.nextDts : state.nextDts - pktDts;
            if (wrapDelta < normalDelta / 10n) {
                disableDiscontinuityCorrection = false;
            }
        }
        // Intra-stream discontinuity detection: compare against this stream's
        // predicted next DTS (maintained by dtsPredict)
        if (state.nextDts !== AV_NOPTS_VALUE && !disableDiscontinuityCorrection) {
            const delta = pktDts - state.nextDts;
            if (fmtIsDiscont) {
                // Discontinuous format (e.g., MPEG-TS) - apply offset correction
                // Threshold is dtsDeltaThreshold seconds, in AV_TIME_BASE units;
                // the third condition catches DTS going backwards by >0.1s
                const threshold = BigInt(this.options.dtsDeltaThreshold) * BigInt(AV_TIME_BASE);
                if (delta > threshold || delta < -threshold || pktDts + BigInt(AV_TIME_BASE) / 10n < state.dts) {
                    this.tsOffsetDiscont -= delta;
                    // Apply correction to packet
                    const deltaInPktTb = avRescaleQ(delta, AV_TIME_BASE_Q, packet.timeBase);
                    packet.dts -= deltaInPktTb;
                    if (packet.pts !== AV_NOPTS_VALUE) {
                        packet.pts -= deltaInPktTb;
                    }
                }
            }
            else {
                // Continuous format (e.g., MP4) - mark invalid timestamps
                // instead of shifting them (uses the stricter dtsErrorThreshold)
                const threshold = BigInt(this.options.dtsErrorThreshold) * BigInt(AV_TIME_BASE);
                // Check DTS
                if (delta > threshold || delta < -threshold) {
                    packet.dts = AV_NOPTS_VALUE;
                }
                // Check PTS
                if (packet.pts !== AV_NOPTS_VALUE) {
                    const pktPts = avRescaleQ(packet.pts, packet.timeBase, AV_TIME_BASE_Q);
                    const ptsDelta = pktPts - state.nextDts;
                    if (ptsDelta > threshold || ptsDelta < -threshold) {
                        packet.pts = AV_NOPTS_VALUE;
                    }
                }
            }
        }
        else if (state.nextDts === AV_NOPTS_VALUE && !this.options.copyTs && fmtIsDiscont && this.lastTs !== AV_NOPTS_VALUE) {
            // Inter-stream discontinuity detection: no prediction for this stream
            // yet, so compare against the demuxer-wide last timestamp instead
            const delta = pktDts - this.lastTs;
            const threshold = BigInt(this.options.dtsDeltaThreshold) * BigInt(AV_TIME_BASE);
            if (delta > threshold || delta < -threshold) {
                this.tsOffsetDiscont -= delta;
                // Apply correction to packet
                const deltaInPktTb = avRescaleQ(delta, AV_TIME_BASE_Q, packet.timeBase);
                packet.dts -= deltaInPktTb;
                if (packet.pts !== AV_NOPTS_VALUE) {
                    packet.pts -= deltaInPktTb;
                }
            }
        }
        // Update last timestamp (from the possibly-corrected DTS, in AV_TIME_BASE)
        this.lastTs = avRescaleQ(packet.dts, packet.timeBase, AV_TIME_BASE_Q);
    }
1873
+ /**
1874
+ * Timestamp Discontinuity Processing - main entry point.
1875
+ *
1876
+ * Based on FFmpeg's ts_discontinuity_process().
1877
+ *
1878
+ * Applies accumulated discontinuity offset and detects new discontinuities.
1879
+ * Must be called for every packet before other timestamp processing.
1880
+ *
1881
+ * Handles:
1882
+ * - Applying previously-detected offset to all streams
1883
+ * - Detecting new discontinuities for audio/video streams
1884
+ *
1885
+ * @param packet - Packet to process
1886
+ *
1887
+ * @param stream - Stream metadata
1888
+ *
1889
+ * @internal
1890
+ */
1891
+ timestampDiscontinuityProcess(packet, stream) {
1892
+ // Apply previously-detected discontinuity offset
1893
+ // This applies to ALL streams, not just audio/video
1894
+ const offset = avRescaleQ(this.tsOffsetDiscont, AV_TIME_BASE_Q, packet.timeBase);
1895
+ if (packet.dts !== AV_NOPTS_VALUE) {
1896
+ packet.dts += offset;
1897
+ }
1898
+ if (packet.pts !== AV_NOPTS_VALUE) {
1899
+ packet.pts += offset;
1900
+ }
1901
+ // Detect new timestamp discontinuities for audio/video
1902
+ const par = stream.codecpar;
1903
+ if ((par.codecType === AVMEDIA_TYPE_VIDEO || par.codecType === AVMEDIA_TYPE_AUDIO) && packet.dts !== AV_NOPTS_VALUE) {
1904
+ this.timestampDiscontinuityDetect(packet, stream);
1905
+ }
1906
+ }
1907
    /**
     * Close demuxer and free resources.
     *
     * Releases format context and I/O context.
     * Safe to call multiple times.
     * Automatically called by Symbol.asyncDispose.
     *
     * Teardown order is deliberate: mark closed, detach the abort listener,
     * stop the demux loop and wake its consumers, detach pb, close the native
     * input (which may interrupt a blocking read), join the demux loop with a
     * timeout, free queued packets, and only then free the IOContext.
     *
     * Direct mapping to avformat_close_input().
     *
     * @example
     * ```typescript
     * const input = await Demuxer.open('video.mp4');
     * try {
     *   // Use input
     * } finally {
     *   await input.close();
     * }
     * ```
     *
     * @see {@link Symbol.asyncDispose} For automatic cleanup
     */
    async close() {
        if (this.isClosed) {
            return;
        }
        this.isClosed = true;
        // Clean up abort signal listener
        this.signalCleanup?.();
        this.signalCleanup = undefined;
        // Signal demux thread to stop FIRST
        this.demuxThreadActive = false;
        // Set EOF flag so generators know to exit
        this.demuxEof = true;
        // Wake up all waiting generators BEFORE closing format context
        // This ensures generators can exit cleanly even if readFrame() is blocking
        for (const resolve of this.queueResolvers.values()) {
            resolve();
        }
        this.queueResolvers.clear();
        // Clear pb reference to prevent use-after-free
        if (this.ioContext) {
            this.formatContext.pb = null;
        }
        // Close FormatContext - this may interrupt blocking readFrame()
        await this.formatContext.closeInput();
        // Wait for demux thread with timeout to avoid hanging on blocked reads
        // (timer is unref'd so a stuck join cannot keep the process alive)
        if (this.demuxThread) {
            const threadPromise = this.demuxThread;
            let timer;
            const timeoutPromise = new Promise((resolve) => {
                timer = setTimeout(resolve, 2000);
                timer.unref();
            });
            await Promise.race([threadPromise, timeoutPromise]);
            clearTimeout(timer);
            this.demuxThread = null;
        }
        // Clean up packet queues (free every buffered packet, then drop the queues)
        for (const queue of this.packetQueues.values()) {
            for (const packet of queue) {
                packet.free();
            }
            queue.length = 0;
        }
        this.packetQueues.clear();
        // NOW we can safely free the IOContext
        if (this.ioContext) {
            this.ioContext.freeContext();
            this.ioContext = undefined;
        }
    }
1978
+ /**
1979
+ * Close demuxer and free resources synchronously.
1980
+ * Synchronous version of close.
1981
+ *
1982
+ * Releases format context and I/O context.
1983
+ * Safe to call multiple times.
1984
+ * Automatically called by Symbol.dispose.
1985
+ *
1986
+ * Direct mapping to avformat_close_input().
1987
+ *
1988
+ * @example
1989
+ * ```typescript
1990
+ * const input = Demuxer.openSync('video.mp4');
1991
+ * try {
1992
+ * // Use input
1993
+ * } finally {
1994
+ * input.closeSync();
1995
+ * }
1996
+ * ```
1997
+ *
1998
+ * @see {@link close} For async version
1999
+ */
2000
+ closeSync() {
2001
+ if (this.isClosed) {
2002
+ return;
2003
+ }
2004
+ this.isClosed = true;
2005
+ // Clean up abort signal listener
2006
+ this.signalCleanup?.();
2007
+ this.signalCleanup = undefined;
2008
+ // IMPORTANT: Clear pb reference FIRST to prevent use-after-free
2009
+ if (this.ioContext) {
2010
+ this.formatContext.pb = null;
2011
+ }
2012
+ // Close FormatContext
2013
+ this.formatContext.closeInputSync();
2014
+ this.demuxThreadActive = false;
2015
+ for (const queue of this.packetQueues.values()) {
2016
+ for (const packet of queue) {
2017
+ packet.free();
2018
+ }
2019
+ queue.length = 0;
2020
+ }
2021
+ this.packetQueues.clear();
2022
+ this.queueResolvers.clear();
2023
+ this.demuxEof = false;
2024
+ // NOW we can safely free the IOContext
2025
+ if (this.ioContext) {
2026
+ this.ioContext.freeContext();
2027
+ this.ioContext = undefined;
2028
+ }
2029
+ }
2030
+ /**
2031
+ * Get underlying format context.
2032
+ *
2033
+ * Returns the internal format context for advanced operations.
2034
+ *
2035
+ * @returns Format context
2036
+ *
2037
+ * @internal
2038
+ */
2039
+ getFormatContext() {
2040
+ return this.formatContext;
2041
+ }
2042
+ /**
2043
+ * Dispose of demuxer.
2044
+ *
2045
+ * Implements AsyncDisposable interface for automatic cleanup.
2046
+ * Equivalent to calling close().
2047
+ *
2048
+ * @example
2049
+ * ```typescript
2050
+ * {
2051
+ * await using input = await Demuxer.open('video.mp4');
2052
+ * // Process media...
2053
+ * } // Automatically closed
2054
+ * ```
2055
+ *
2056
+ * @see {@link close} For manual cleanup
2057
+ */
2058
+ async [Symbol.asyncDispose]() {
2059
+ await this.close();
2060
+ }
2061
+ /**
2062
+ * Dispose of demuxer synchronously.
2063
+ *
2064
+ * Implements Disposable interface for automatic cleanup.
2065
+ * Equivalent to calling closeSync().
2066
+ *
2067
+ * @example
2068
+ * ```typescript
2069
+ * {
2070
+ * using input = Demuxer.openSync('video.mp4');
2071
+ * // Process media...
2072
+ * } // Automatically closed
2073
+ * ```
2074
+ *
2075
+ * @see {@link closeSync} For manual cleanup
2076
+ */
2077
+ [Symbol.dispose]() {
2078
+ this.closeSync();
2079
+ }
2080
+ }
2081
+ //# sourceMappingURL=demuxer.js.map