@moq/publish 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/README.md +100 -0
  2. package/audio/capture-worklet.d.ts +2 -0
  3. package/audio/capture-worklet.d.ts.map +1 -0
  4. package/audio/capture.d.ts +6 -0
  5. package/audio/capture.d.ts.map +1 -0
  6. package/audio/encoder.d.ts +31 -0
  7. package/audio/encoder.d.ts.map +1 -0
  8. package/audio/index.d.ts +3 -0
  9. package/audio/index.d.ts.map +1 -0
  10. package/audio/types.d.ts +18 -0
  11. package/audio/types.d.ts.map +1 -0
  12. package/broadcast.d.ts +36 -0
  13. package/broadcast.d.ts.map +1 -0
  14. package/chat/index.d.ts +19 -0
  15. package/chat/index.d.ts.map +1 -0
  16. package/chat/message.d.ts +19 -0
  17. package/chat/message.d.ts.map +1 -0
  18. package/chat/typing.d.ts +19 -0
  19. package/chat/typing.d.ts.map +1 -0
  20. package/element.d.ts +32 -0
  21. package/element.d.ts.map +1 -0
  22. package/element.js +141 -0
  23. package/element.js.map +1 -0
  24. package/index.d.ts +9 -0
  25. package/index.d.ts.map +1 -0
  26. package/index.js +24 -0
  27. package/index.js.map +1 -0
  28. package/libav-opus-af-BlMWboA7.js +368 -0
  29. package/libav-opus-af-BlMWboA7.js.map +1 -0
  30. package/location/index.d.ts +33 -0
  31. package/location/index.d.ts.map +1 -0
  32. package/location/peers.d.ts +26 -0
  33. package/location/peers.d.ts.map +1 -0
  34. package/location/window.d.ts +35 -0
  35. package/location/window.d.ts.map +1 -0
  36. package/main-DGBFe0O7.js +2301 -0
  37. package/main-DGBFe0O7.js.map +1 -0
  38. package/package.json +41 -0
  39. package/preview.d.ts +21 -0
  40. package/preview.d.ts.map +1 -0
  41. package/screen-BCioRZHS.js +6579 -0
  42. package/screen-BCioRZHS.js.map +1 -0
  43. package/source/camera.d.ts +19 -0
  44. package/source/camera.d.ts.map +1 -0
  45. package/source/device.d.ts +19 -0
  46. package/source/device.d.ts.map +1 -0
  47. package/source/file.d.ts +20 -0
  48. package/source/file.d.ts.map +1 -0
  49. package/source/index.d.ts +6 -0
  50. package/source/index.d.ts.map +1 -0
  51. package/source/microphone.d.ts +19 -0
  52. package/source/microphone.d.ts.map +1 -0
  53. package/source/screen.d.ts +22 -0
  54. package/source/screen.d.ts.map +1 -0
  55. package/support/element.d.ts +22 -0
  56. package/support/element.d.ts.map +1 -0
  57. package/support/element.js +184 -0
  58. package/support/element.js.map +1 -0
  59. package/support/index.d.ts +29 -0
  60. package/support/index.d.ts.map +1 -0
  61. package/support/index.js +63 -0
  62. package/support/index.js.map +1 -0
  63. package/ui/components/CameraSourceButton.d.ts +2 -0
  64. package/ui/components/CameraSourceButton.d.ts.map +1 -0
  65. package/ui/components/FileSourceButton.d.ts +2 -0
  66. package/ui/components/FileSourceButton.d.ts.map +1 -0
  67. package/ui/components/MediaSourceSelector.d.ts +8 -0
  68. package/ui/components/MediaSourceSelector.d.ts.map +1 -0
  69. package/ui/components/MicrophoneSourceButton.d.ts +2 -0
  70. package/ui/components/MicrophoneSourceButton.d.ts.map +1 -0
  71. package/ui/components/NothingSourceButton.d.ts +2 -0
  72. package/ui/components/NothingSourceButton.d.ts.map +1 -0
  73. package/ui/components/PublishControls.d.ts +2 -0
  74. package/ui/components/PublishControls.d.ts.map +1 -0
  75. package/ui/components/PublishStatusIndicator.d.ts +2 -0
  76. package/ui/components/PublishStatusIndicator.d.ts.map +1 -0
  77. package/ui/components/ScreenSourceButton.d.ts +2 -0
  78. package/ui/components/ScreenSourceButton.d.ts.map +1 -0
  79. package/ui/context.d.ts +25 -0
  80. package/ui/context.d.ts.map +1 -0
  81. package/ui/element.d.ts +5 -0
  82. package/ui/element.d.ts.map +1 -0
  83. package/ui/hooks/use-publish-ui.d.ts +15 -0
  84. package/ui/hooks/use-publish-ui.d.ts.map +1 -0
  85. package/ui/index.d.ts +7 -0
  86. package/ui/index.d.ts.map +1 -0
  87. package/ui/index.js +974 -0
  88. package/ui/index.js.map +1 -0
  89. package/user.d.ts +25 -0
  90. package/user.d.ts.map +1 -0
  91. package/video/encoder.d.ts +31 -0
  92. package/video/encoder.d.ts.map +1 -0
  93. package/video/index.d.ts +85 -0
  94. package/video/index.d.ts.map +1 -0
  95. package/video/polyfill.d.ts +3 -0
  96. package/video/polyfill.d.ts.map +1 -0
  97. package/video/types.d.ts +20 -0
  98. package/video/types.d.ts.map +1 -0
@@ -0,0 +1,2301 @@
1
/*
 * Polyfill for EncodedAudioChunk/EncodedVideoChunk: wraps the provided
 * bytes and exposes them via copyTo(), adopting the backing buffer in
 * place when it was offered through `init.transfer`.
 */
let T = class {
  constructor(init) {
    this.type = init.type;
    this.timestamp = init.timestamp;
    // duration is optional; anything non-numeric normalizes to null
    this.duration = typeof init.duration === "number" ? init.duration : null;
    this.byteLength = init.data.byteLength;

    // Was the backing ArrayBuffer listed for transfer?
    let transferred = false;
    if (init.transfer) {
      const backing = init.data.buffer ? init.data.buffer : init.data;
      const transferList =
        init.transfer instanceof Array ? init.transfer : Array.from(init.transfer);
      transferred = transferList.some((candidate) => candidate === backing);
    }

    // Byte view over the caller's data (handles ArrayBuffers and views).
    const view = new Uint8Array(
      init.data.buffer || init.data,
      init.data.byteOffset || 0,
      init.data.BYTES_PER_ELEMENT
        ? init.data.BYTES_PER_ELEMENT * init.data.length
        : init.data.byteLength
    );
    // Transferred buffers are adopted; otherwise copy defensively.
    this._data = transferred ? view : view.slice(0);
  }

  // Internal: expose the raw bytes to the libav glue code.
  _libavGetData() {
    return this._data;
  }

  // Copy this chunk's bytes into `destination` (ArrayBuffer or view).
  copyTo(destination) {
    new Uint8Array(
      destination.buffer || destination,
      destination.byteOffset || 0
    ).set(this._data);
  }
};
29
/*
 * `globalThis` polyfill: on engines lacking it, define a temporary
 * getter on Object.prototype so that reading the bare `_T_` identifier
 * surfaces the global object as `this`, then capture it and clean up.
 * No-op where `globalThis` already exists.
 */
(function (objectCtor) {
  function capture() {
    // Sloppy-mode plain call gives the global as `this`; strict mode
    // gives undefined, so fall back to `self` (workers/browsers).
    var globalObject = this || self;
    globalObject.globalThis = globalObject;
    delete objectCtor.prototype._T_;
  }
  if (typeof globalThis != "object") {
    if (this) {
      capture();
    } else {
      objectCtor.defineProperty(objectCtor.prototype, "_T_", {
        configurable: true,
        get: capture
      });
      _T_; // touching the accessor runs `capture` with the global as `this`
    }
  }
})(Object);
39
/*
 * Polyfill for the WebCodecs AudioData interface, backed by a typed
 * array (`_data`). Interleaved formats store all channels in one plane;
 * planar formats store one plane of `numberOfFrames` elements per
 * channel, sequentially. Relies on the sibling helpers Z (format ->
 * typed-array view), ee (bytes per element) and F (is-interleaved).
 */
let B = class G {
  /**
   * @param init AudioDataInit: format, sampleRate, numberOfFrames,
   *             numberOfChannels, timestamp, data, [transfer]
   * @throws TypeError when the init dictionary is invalid
   */
  constructor(init) {
    G._checkValidAudioDataInit(init);
    this.format = init.format;
    this.sampleRate = init.sampleRate;
    this.numberOfFrames = init.numberOfFrames;
    this.numberOfChannels = init.numberOfChannels;
    this.timestamp = init.timestamp;

    // Adopt the caller's buffer when it was offered via `transfer`;
    // otherwise copy so later caller-side writes can't corrupt us.
    let transferred = false;
    if (init.transfer) {
      const backing = init.data.buffer ? init.data.buffer : init.data;
      const transferList = init.transfer instanceof Array
        ? init.transfer
        : Array.from(init.transfer);
      for (const candidate of transferList) {
        if (candidate === backing) {
          transferred = true;
          break;
        }
      }
    }
    let raw;
    let byteOffset = 0;
    if (transferred) {
      raw = init.data;
      byteOffset = init.data.byteOffset || 0;
    } else {
      raw = init.data.slice(0);
    }
    this._data = Z(init.format, raw.buffer || raw, byteOffset);

    // Duration in microseconds.
    this.duration = init.numberOfFrames / init.sampleRate * 1e6;
  }

  /**
   * Convert a polyfill AudioData to a native AudioData.
   * @param opts Conversion options ({ transfer } moves the buffer and
   *             closes this polyfill instance)
   */
  toNative(opts = {}) {
    const native = new globalThis.AudioData({
      data: this._data,
      format: this.format,
      sampleRate: this.sampleRate,
      numberOfFrames: this.numberOfFrames,
      numberOfChannels: this.numberOfChannels,
      timestamp: this.timestamp,
      transfer: opts.transfer ? [this._data.buffer] : []
    });
    if (opts.transfer)
      this.close();
    return native;
  }

  /**
   * Convert a native AudioData to a polyfill AudioData. WARNING: Inefficient,
   * as the data cannot be transferred out.
   * @param from AudioData to copy in
   */
  static fromNative(from) {
    // Planar layouts need one copyTo per channel; interleaved needs one.
    const planes = F(from.format) ? 1 : from.numberOfChannels;
    const planeBytes = from.allocationSize({
      format: from.format,
      planeIndex: 0
    });
    const bytes = new Uint8Array(planeBytes * planes);
    for (let p = 0; p < planes; p++) {
      from.copyTo(bytes.subarray(p * planeBytes), {
        format: from.format,
        planeIndex: p
      });
    }
    return new G({
      data: bytes,
      format: from.format,
      sampleRate: from.sampleRate,
      numberOfFrames: from.numberOfFrames,
      numberOfChannels: from.numberOfChannels,
      timestamp: from.timestamp,
      transfer: [bytes.buffer]
    });
  }

  // Internal: expose the raw samples to the libav glue code.
  _libavGetData() {
    return this._data;
  }

  // Validate an AudioDataInit dictionary; throws TypeError on bad values.
  static _checkValidAudioDataInit(init) {
    if (init.sampleRate <= 0)
      throw new TypeError(`Invalid sample rate ${init.sampleRate}`);
    if (init.numberOfFrames <= 0)
      throw new TypeError(`Invalid number of frames ${init.numberOfFrames}`);
    if (init.numberOfChannels <= 0)
      throw new TypeError(`Invalid number of channels ${init.numberOfChannels}`);
    const samples = init.numberOfFrames * init.numberOfChannels;
    const neededBytes = ee(init.format) * samples;
    if (init.data.byteLength < neededBytes)
      throw new TypeError(`This audio data must be at least ${neededBytes} bytes`);
  }

  /**
   * Bytes a copyTo() with these options would write.
   * @throws InvalidStateError if close() has detached the data
   */
  allocationSize(opts) {
    if (this._data === null)
      throw new DOMException("Detached", "InvalidStateError");
    const elements = this._computeCopyElementCount(opts);
    const format = opts.format ? opts.format : this.format;
    return ee(format) * elements;
  }

  // WebCodecs' "compute copy element count" algorithm.
  _computeCopyElementCount(opts) {
    const format = opts.format ? opts.format : this.format;
    const interleaved = F(format);
    if (interleaved) {
      if (opts.planeIndex > 0)
        throw new RangeError("Invalid plane");
    } else if (opts.planeIndex >= this.numberOfChannels) {
      throw new RangeError("Invalid plane");
    }
    if (this.format !== format && format !== "f32-planar")
      throw new DOMException("Only conversion to f32-planar is supported", "NotSupportedError");
    const totalFrames = this.numberOfFrames;
    const frameOffset = opts.frameOffset || 0;
    if (frameOffset >= totalFrames)
      throw new RangeError("Frame offset out of range");
    let frameCount = totalFrames - frameOffset;
    if (typeof opts.frameCount === "number") {
      // Per the spec, only requesting *more* frames than remain is an
      // error; exactly the remainder is valid. (Was `>=`, which wrongly
      // rejected frameCount === remaining frames.)
      if (opts.frameCount > frameCount)
        throw new RangeError("Frame count out of range");
      frameCount = opts.frameCount;
    }
    let elements = frameCount;
    if (interleaved)
      elements *= this.numberOfChannels;
    return elements;
  }

  /**
   * Copy samples into `dest` per `opts` (planeIndex, and optional
   * format/frameOffset/frameCount). Copies in the source format, or
   * converts to f32-planar.
   * @throws InvalidStateError if detached; RangeError on bad ranges
   */
  copyTo(dest, opts) {
    if (this._data === null)
      throw new DOMException("Detached", "InvalidStateError");
    const elements = this._computeCopyElementCount(opts);
    const destFormat = opts.format ? opts.format : this.format;
    if (ee(destFormat) * elements > dest.byteLength)
      throw new RangeError("Buffer too small");

    // Source plane (for interleaved data, planeIndex is always 0).
    const plane = this._data.subarray(opts.planeIndex * this.numberOfFrames);
    const frameOffset = opts.frameOffset || 0;
    const channels = this.numberOfChannels;

    if (this.format === destFormat) {
      // Same format: a straight typed-array copy.
      const out = Z(destFormat, dest.buffer || dest, dest.byteOffset || 0);
      if (F(destFormat))
        out.set(plane.subarray(frameOffset * channels, frameOffset * channels + elements));
      else
        out.set(plane.subarray(frameOffset, frameOffset + elements));
    } else {
      // Convert integer samples to f32-planar: (sample - bias) / scale.
      const out = Z(destFormat, dest.buffer || dest, dest.byteOffset || 0);
      let bias = 0;
      let scale = 1;
      switch (this.format) {
        case "u8":
        case "u8-planar":
          bias = 128;
          scale = 128;
          break;
        case "s16":
        case "s16-planar":
          scale = 32768;
          break;
        case "s32":
        case "s32-planar":
          scale = 2147483648;
          break;
      }
      if (F(this.format)) {
        // Interleaved source: stride over the channel count.
        for (let src = opts.planeIndex + frameOffset * channels, dst = 0; dst < elements; src += channels, dst++)
          out[dst] = (plane[src] - bias) / scale;
      } else {
        for (let src = frameOffset, dst = 0; dst < elements; src++, dst++)
          out[dst] = (plane[src] - bias) / scale;
      }
    }
  }

  /** Deep-copy this AudioData. @throws InvalidStateError if detached. */
  clone() {
    if (this._data === null)
      throw new DOMException("Detached", "InvalidStateError");
    return new G({
      format: this.format,
      sampleRate: this.sampleRate,
      numberOfFrames: this.numberOfFrames,
      numberOfChannels: this.numberOfChannels,
      timestamp: this.timestamp,
      data: this._data
    });
  }

  /** Detach the sample data. Subsequent reads throw InvalidStateError. */
  close() {
    this._data = null;
  }
};
203
/*
 * Create the typed-array view matching an AudioSampleFormat over
 * `buffer`, starting at `byteOffset`. Planar and interleaved variants
 * share the same element type.
 * @throws TypeError on an unknown format
 */
function Z(format, buffer, byteOffset) {
  const ctorByFormat = new Map([
    ["u8", Uint8Array],
    ["u8-planar", Uint8Array],
    ["s16", Int16Array],
    ["s16-planar", Int16Array],
    ["s32", Int32Array],
    ["s32-planar", Int32Array],
    ["f32", Float32Array],
    ["f32-planar", Float32Array]
  ]);
  const View = ctorByFormat.get(format);
  if (!View)
    throw new TypeError("Invalid AudioSampleFormat");
  return new View(buffer, byteOffset);
}
221
/*
 * Bytes per sample element for an AudioSampleFormat.
 * @throws TypeError on an unknown format
 */
function ee(format) {
  const bytesByFormat = new Map([
    ["u8", 1],
    ["u8-planar", 1],
    ["s16", 2],
    ["s16-planar", 2],
    ["s32", 4],
    ["s32-planar", 4],
    ["f32", 4],
    ["f32-planar", 4]
  ]);
  const width = bytesByFormat.get(format);
  if (width === undefined)
    throw new TypeError("Invalid AudioSampleFormat");
  return width;
}
238
/*
 * True when an AudioSampleFormat stores channels interleaved in a
 * single plane; false for the "-planar" variants.
 * @throws TypeError on an unknown format
 */
function F(format) {
  const baseFormats = ["u8", "s16", "s32", "f32"];
  if (baseFormats.includes(format))
    return true;
  if (baseFormats.some((base) => format === `${base}-planar`))
    return false;
  throw new TypeError("Invalid AudioSampleFormat");
}
254
/*
 * Base class that composes an EventTarget and re-exposes its listener
 * API as own, pre-bound methods on each instance.
 */
class pe {
  constructor() {
    const target = new EventTarget();
    this._eventer = target;
    this.addEventListener = target.addEventListener.bind(target);
    this.removeEventListener = target.removeEventListener.bind(target);
    this.dispatchEvent = target.dispatchEvent.bind(target);
  }
}

/*
 * Event base for the encoder/decoder classes: forwards "dequeue"
 * events to the optional `ondequeue` handler property.
 */
class Q extends pe {
  constructor() {
    super();
    this.addEventListener("dequeue", (event) => {
      if (this.ondequeue)
        this.ondequeue(event);
    });
  }
}
267
/*
 * TypeScript-style __awaiter helper: drives a generator that yields
 * promises/values and returns a promise for its final return value.
 */
var re = function (thisArg, args, PromiseCtor, generatorFn) {
  function adopt(value) {
    if (value instanceof PromiseCtor)
      return value;
    return new PromiseCtor(function (resolve) {
      resolve(value);
    });
  }
  return new (PromiseCtor || (PromiseCtor = Promise))(function (resolve, reject) {
    const generator = generatorFn.apply(thisArg, args || []);
    function onFulfilled(value) {
      try {
        step(generator.next(value));
      } catch (err) {
        reject(err);
      }
    }
    function onRejected(reason) {
      try {
        step(generator.throw(reason));
      } catch (err) {
        reject(err);
      }
    }
    function step(result) {
      if (result.done)
        resolve(result.value);
      else
        adopt(result.value).then(onFulfilled, onRejected);
    }
    step(generator.next());
  });
};
294
// Module-level libav.js state shared by the codec wrappers below.
+ let C = null;
294
// Pool of idle LibAV instances, reused via I()/g().
+ const ie = [];
295
// de: options passed to LibAV(); le/he: cached decoder/encoder codec
// lists (null until be() runs).
+ let de = {}, le = null, he = null;
297
// Install the libav.js loader object used to create LibAV instances.
function me(libavLoader) {
  C = libavLoader;
}
300
// Set the options passed to LibAV() when creating new instances.
function ye(libavOptions) {
  de = libavOptions;
}
303
// Obtain a LibAV instance: reuse one from the pool when available,
// otherwise create a fresh one with the configured options.
function I() {
  return re(this, void 0, void 0, function* () {
    if (ie.length)
      return ie.shift();
    return yield C.LibAV(de);
  });
}
308
// Return a LibAV instance to the reuse pool.
function g(libav) {
  ie.push(libav);
}
311
/*
 * Probe which of the codecs this libav.js build actually supports.
 * @param probeEncoders true to query encoders, false for decoders
 * @returns list of WebCodecs-style codec names
 */
function ce(probeEncoders) {
  return re(this, void 0, void 0, function* () {
    const libav = yield I();
    const supported = [];
    const candidates = [
      ["flac", "flac"],
      ["libopus", "opus"],
      ["libvorbis", "vorbis"],
      ["libaom-av1", "av01"],
      ["libvpx-vp9", "vp09"],
      ["libvpx", "vp8"]
    ];
    for (const [libavName, webCodecsName] of candidates) {
      let found;
      if (probeEncoders)
        found = yield libav.avcodec_find_encoder_by_name(libavName);
      else
        found = yield libav.avcodec_find_decoder_by_name(libavName);
      if (found)
        supported.push(webCodecsName);
    }
    g(libav);
    return supported;
  });
}
326
/*
 * One-time initialization: default the libav.js loader to the global
 * LibAV object and cache the supported decoder/encoder codec lists.
 */
function be() {
  return re(this, void 0, void 0, function* () {
    if (!C)
      C = LibAV;
    le = yield ce(false);
    he = yield ce(true);
  });
}
331
/*
 * Translate a WebCodecs decoder config into libav.js terms.
 * @param codec  codec string, or an object carrying `libavjs` directly
 * @param config decoder config (description etc.)
 * @returns { codec } when supported, null when recognized-but-
 *          unsupported; passes pre-translated `codec.libavjs` through
 * @throws TypeError on codec strings this polyfill doesn't recognize
 */
function U(codec, config) {
  if (typeof codec != "string")
    return codec.libavjs;

  // Strip any ".profile.level..." suffix from the codec string.
  codec = codec.replace(/\..*/, "");
  let libavName = codec;
  switch (codec) {
    // Audio
    case "flac":
      // FLAC decoding requires out-of-band extradata (description).
      if (typeof config.description == "undefined")
        return null;
      break;
    case "opus":
      // Opus is only supported *without* an out-of-band description.
      if (typeof config.description != "undefined")
        return null;
      libavName = "libopus";
      break;
    case "vorbis":
      if (typeof config.description == "undefined")
        return null;
      libavName = "libvorbis";
      break;
    // Video
    case "av01":
      libavName = "libaom-av1";
      break;
    case "vp09":
      libavName = "libvpx-vp9";
      break;
    case "vp8":
      libavName = "libvpx";
      break;
    // Recognized but unsupported
    case "mp3":
    case "mp4a":
    case "ulaw":
    case "alaw":
    case "avc1":
    case "avc3":
    case "hev1":
    case "hvc1":
      return null;
    // Unrecognized
    default:
      throw new TypeError("Unrecognized codec");
  }
  // Only report support when this libav build has the decoder.
  return le.indexOf(codec) >= 0 ? { codec: libavName } : null;
}
379
/*
 * Translate a WebCodecs encoder config into libav.js terms: a codec
 * name, AVCodecContext fields (`ctx`) and codec options (`options`).
 * @param codec  codec string, or an object carrying `libavjs` directly
 * @param config encoder config
 * @returns { codec, ctx, options } when supported, null when
 *          recognized-but-unsupported; passes `codec.libavjs` through
 * @throws TypeError on unrecognized codec strings
 */
function N(codec, config) {
  if (typeof codec != "string")
    return codec.libavjs;

  const parts = codec.split(".");
  codec = parts[0];
  let libavName = codec;
  const ctx = {};
  const options = {};
  let isVideo = false;

  switch (codec) {
    // Audio
    case "flac":
      ctx.sample_fmt = 2;
      ctx.bit_rate = 0;
      if (typeof config.flac == "object" && config.flac !== null) {
        const flacOpts = config.flac;
        if (typeof flacOpts.blockSize == "number")
          ctx.frame_size = flacOpts.blockSize;
        // compressLevel is unsupported here.
        if (typeof flacOpts.compressLevel == "number")
          return null;
      }
      break;

    case "opus":
      libavName = "libopus";
      ctx.sample_fmt = 3;
      ctx.sample_rate = 48e3;
      if (typeof config.opus == "object" && config.opus !== null) {
        const opusOpts = config.opus;
        if (typeof opusOpts.frameDuration == "number")
          options.frame_duration = "" + opusOpts.frameDuration / 1e3;
        // complexity is unsupported here.
        if (typeof opusOpts.complexity != "undefined")
          return null;
        if (typeof opusOpts.packetlossperc == "number") {
          if (opusOpts.packetlossperc < 0 || opusOpts.packetlossperc > 100)
            return null;
          options.packet_loss = "" + opusOpts.packetlossperc;
        }
        if (typeof opusOpts.useinbandfec == "boolean")
          options.fec = opusOpts.useinbandfec ? "1" : "0";
        // DTX and non-"opus" bitstream formats are unsupported.
        if (typeof opusOpts.usedtx == "boolean" || typeof opusOpts.format == "string" && opusOpts.format !== "opus")
          return null;
      }
      break;

    case "vorbis":
      libavName = "libvorbis";
      ctx.sample_fmt = 8;
      break;

    // Video
    case "av01":
      isVideo = true;
      libavName = "libaom-av1";
      if (config.latencyMode === "realtime") {
        options.usage = "realtime";
        options["cpu-used"] = "8";
      }
      if (!ve(parts, ctx))
        return null;
      break;

    case "vp09":
      isVideo = true;
      libavName = "libvpx-vp9";
      if (config.latencyMode === "realtime") {
        options.quality = "realtime";
        options["cpu-used"] = "8";
      }
      if (!we(parts, ctx))
        return null;
      break;

    case "vp8":
      isVideo = true;
      libavName = "libvpx";
      if (config.latencyMode === "realtime") {
        options.quality = "realtime";
        options["cpu-used"] = "8";
      }
      break;

    // Recognized but unsupported
    case "mp3":
    case "mp4a":
    case "ulaw":
    case "alaw":
    case "avc1":
      return null;

    // Unrecognized
    default:
      throw new TypeError("Unrecognized codec");
  }

  // Only report support when this libav build has the encoder.
  if (!(he.indexOf(codec) >= 0))
    return null;

  if (isVideo) {
    // Default pixel format unless the codec string already chose one.
    if (typeof ctx.pix_fmt != "number")
      ctx.pix_fmt = 0;
    const width = ctx.width = config.width;
    const height = ctx.height = config.height;
    if (config.framerate) {
      ctx.framerate_num = Math.round(config.framerate);
      ctx.framerate_den = 1;
    }
    // Non-square pixels: express display size as a sample aspect ratio.
    const displayWidth = config.displayWidth || config.width;
    const displayHeight = config.displayHeight || config.height;
    if (displayWidth !== width || displayHeight !== height) {
      ctx.sample_aspect_ratio_num = displayWidth * height;
      ctx.sample_aspect_ratio_den = displayHeight * width;
    }
  } else {
    if (!ctx.sample_rate)
      ctx.sample_rate = config.sampleRate || 48e3;
    if (config.numberOfChannels) {
      const channels = config.numberOfChannels;
      ctx.channel_layout = channels === 1 ? 4 : (1 << channels) - 1;
    }
  }

  if (typeof ctx.bit_rate != "number" && config.bitrate)
    ctx.bit_rate = config.bitrate;

  return {
    codec: libavName,
    ctx,
    options
  };
}
455
/*
 * Parse the dot-separated parts of an "av01.*" codec string (profile,
 * level, tier, bit depth, monochrome flag, chroma subsampling) into
 * AVCodecContext fields.
 * @param parts codec string split on "."
 * @param ctx   context object receiving profile/level/pix_fmt
 * @returns false when the string is valid but unsupported here
 * @throws TypeError when the codec string itself is malformed
 */
function ve(parts, ctx) {
  // Profile: 0-2.
  if (parts[1]) {
    const profile = +parts[1];
    if (profile >= 0 && profile <= 2)
      ctx.profile = profile;
    else
      throw new TypeError("Invalid AV1 profile");
  }
  // Level: 0-23.
  if (parts[2]) {
    const level = +parts[2];
    if (level >= 0 && level <= 23)
      ctx.level = level;
    else
      throw new TypeError("Invalid AV1 level");
  }
  // Tier: "M" (main) or "H" (high; only meaningful at level 4.0+).
  if (parts[3])
    switch (parts[3]) {
      case "M":
        break;
      case "H":
        if (ctx.level && ctx.level >= 8)
          return false; // valid, but high tier is unsupported here
        throw new TypeError("The AV1 high tier is only available for level 4.0 and up");
      default:
        throw new TypeError("Invalid AV1 tier");
    }
  // Bit depth: only 8-bit is supported; 10/12 are valid but unsupported.
  if (parts[4]) {
    // Fixed: this previously read parts[3] (the tier letter, NaN when
    // coerced), so every codec string carrying a bit-depth field was
    // rejected as an invalid depth.
    const bitDepth = +parts[4];
    if (bitDepth === 10 || bitDepth === 12)
      return false;
    if (bitDepth !== 8)
      throw new TypeError("Invalid AV1 bit depth");
  }
  // Monochrome flag: only "0" (color) is supported.
  if (parts[5])
    switch (parts[5]) {
      case "0":
        break;
      case "1":
        return false;
      default:
        throw new TypeError("Invalid AV1 monochrome flag");
    }
  // Chroma subsampling -> pixel format.
  if (parts[6])
    switch (parts[6]) {
      case "000":
        ctx.pix_fmt = 5;
        break;
      case "100":
        ctx.pix_fmt = 4;
        break;
      case "110":
        ctx.pix_fmt = 0;
        break;
      case "111":
        return false;
      default:
        throw new TypeError("Invalid AV1 subsampling mode");
    }
  return true;
}
515
/*
 * Parse the dot-separated parts of a "vp09.*" codec string (profile,
 * level, bit depth, chroma subsampling) into AVCodecContext fields.
 * @param parts codec string split on "."
 * @param ctx   context object receiving profile/level/pix_fmt
 * @returns false for valid-but-unsupported configs (10/12-bit)
 * @throws TypeError for malformed codec strings
 */
function we(parts, ctx) {
  // Profile: 0-3.
  if (parts[1]) {
    const profile = +parts[1];
    if (!(profile >= 0 && profile <= 3))
      throw new TypeError("Invalid VP9 profile");
    ctx.profile = profile;
  }
  // Level: majors 1-4 allow minor 0-1; majors 5-6 allow minor 0-2.
  if (parts[2]) {
    const major = +parts[2][0];
    const minor = +parts[2][1];
    if (major >= 1 && major <= 4) {
      if (!(minor >= 0 && minor <= 1))
        throw new TypeError("Invalid VP9 level");
    } else if (major >= 5 && major <= 6) {
      if (!(minor >= 0 && minor <= 2))
        throw new TypeError("Invalid VP9 level");
    } else {
      throw new TypeError("Invalid VP9 level");
    }
    ctx.level = +parts[2];
  }
  // Bit depth: only 8-bit supported; 10/12 are valid but unsupported.
  if (parts[3]) {
    const bitDepth = +parts[3];
    if (bitDepth === 10 || bitDepth === 12)
      return false;
    if (bitDepth !== 8)
      throw new TypeError("Invalid VP9 bit depth");
  }
  // Chroma subsampling -> pixel format.
  if (parts[4])
    switch (+parts[4]) {
      case 0:
      case 1:
        ctx.pix_fmt = 0;
        break;
      case 2:
        ctx.pix_fmt = 4;
        break;
      case 3:
        ctx.pix_fmt = 5;
        break;
      default:
        throw new TypeError("Invalid VP9 chroma subsampling format");
    }
  return true;
}
557
/*
 * Pick the subset of `source`'s properties named in `keys` (inherited
 * properties count, per the `in` operator).
 */
function Y(source, keys) {
  const picked = {};
  for (const key of keys) {
    if (key in source)
      picked[key] = source[key];
  }
  return picked;
}
563
/*
 * TypeScript __awaiter helper (the bundler emits one copy per source
 * module): runs a generator, awaiting each yielded value, and returns
 * a promise for the generator's return value.
 */
var M = function (thisArg, args, PromiseCtor, generatorFn) {
  function adopt(value) {
    return value instanceof PromiseCtor
      ? value
      : new PromiseCtor(function (resolve) { resolve(value); });
  }
  return new (PromiseCtor || (PromiseCtor = Promise))(function (resolve, reject) {
    const iterator = generatorFn.apply(thisArg, args || []);
    const settle = (result) => {
      if (result.done) {
        resolve(result.value);
        return;
      }
      adopt(result.value).then(
        (value) => {
          try {
            settle(iterator.next(value));
          } catch (err) {
            reject(err);
          }
        },
        (reason) => {
          try {
            settle(iterator.throw(reason));
          } catch (err) {
            reject(err);
          }
        }
      );
    };
    settle(iterator.next());
  });
};
590
/*
 * AudioDecoder polyfill backed by libav.js. All asynchronous work is
 * serialized on the internal promise chain `_p`; decoded frames are
 * delivered through the `output` callback as polyfill AudioData (B).
 */
let $ = class extends Q {
  constructor(init) {
    super();
    this._p = Promise.all([]);
    this._libav = null;
    this._codec = this._c = this._pkt = this._frame = 0;
    this._output = init.output;
    this._error = init.error;
    this.state = "unconfigured";
    this.decodeQueueSize = 0;
  }

  /**
   * Configure (or reconfigure) the decoder for the given codec.
   * @throws InvalidStateError when the decoder is closed
   */
  configure(config) {
    if (this.state === "closed")
      throw new DOMException("Decoder is closed", "InvalidStateError");
    // Tear down any previous configuration first.
    if (this._libav)
      this._p = this._p.then(() => this._free());
    this.state = "configured";
    this._p = this._p.then(() => M(this, void 0, void 0, function* () {
      // Normalize the optional codec description (extradata) to bytes.
      let description;
      if (config.description) {
        if (ArrayBuffer.isView(config.description)) {
          const view = config.description;
          description = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
        } else {
          const raw = config.description;
          description = new Uint8Array(raw);
        }
      }
      const translated = U(config.codec, config);
      if (!translated) {
        this._closeAudioDecoder(new DOMException("Unsupported codec", "NotSupportedError"));
        return;
      }
      const libav = this._libav = yield I();
      const params = yield libav.avcodec_parameters_alloc();
      const setup = [
        libav.AVCodecParameters_channels_s(params, config.numberOfChannels),
        libav.AVCodecParameters_sample_rate_s(params, config.sampleRate),
        libav.AVCodecParameters_codec_type_s(
          params,
          1
          /* AVMEDIA_TYPE_AUDIO */
        )
      ];
      let extradataPtr = 0;
      if (description) {
        setup.push(libav.AVCodecParameters_extradata_size_s(params, description.byteLength));
        extradataPtr = yield libav.calloc(description.byteLength + 64, 1);
        setup.push(libav.copyin_u8(extradataPtr, description));
        setup.push(libav.AVCodecParameters_extradata_s(params, extradataPtr));
      } else {
        setup.push(libav.AVCodecParameters_extradata_s(params, 0));
        setup.push(libav.AVCodecParameters_extradata_size_s(params, 0));
      }
      yield Promise.all(setup);
      [this._codec, this._c, this._pkt, this._frame] =
        yield libav.ff_init_decoder(translated.codec, params);
      const finish = [
        libav.AVCodecContext_time_base_s(this._c, 1, 1e3),
        libav.avcodec_parameters_free_js(params)
      ];
      if (extradataPtr)
        finish.push(libav.free(extradataPtr));
      yield Promise.all(finish);
    })).catch(this._error);
  }

  // Our own algorithm, close libav: free decoder state and return the
  // LibAV instance to the pool.
  _free() {
    return M(this, void 0, void 0, function* () {
      if (this._c) {
        yield this._libav.ff_free_decoder(this._c, this._pkt, this._frame);
        this._codec = this._c = this._pkt = this._frame = 0;
      }
      if (this._libav) {
        g(this._libav);
        this._libav = null;
      }
    });
  }

  // Close the decoder; reports `err` unless it's an AbortError
  // (i.e. a deliberate reset()/close()).
  _closeAudioDecoder(err) {
    this._resetAudioDecoder(err);
    this.state = "closed";
    this._p = this._p.then(() => this._free());
    if (err.name !== "AbortError") {
      this._p = this._p.then(() => {
        this._error(err);
      });
    }
  }

  // Reset the decoder to the unconfigured state.
  _resetAudioDecoder(err) {
    if (this.state === "closed")
      throw new DOMException("Decoder closed", "InvalidStateError");
    this.state = "unconfigured";
    this._p = this._p.then(() => this._free());
  }

  /**
   * Queue an encoded chunk for decoding; decoded AudioData is emitted
   * through the `output` callback.
   * @throws InvalidStateError when not configured
   */
  decode(chunk) {
    if (this.state !== "configured")
      throw new DOMException("Unconfigured", "InvalidStateError");
    this.decodeQueueSize++;
    this._p = this._p.then(() => M(this, void 0, void 0, function* () {
      const libav = this._libav;
      const ctxPtr = this._c;
      const pktPtr = this._pkt;
      const framePtr = this._frame;
      let frames = null;
      this.decodeQueueSize--;
      this.dispatchEvent(new CustomEvent("dequeue"));
      try {
        // Context time base is 1/1000, so timestamps are milliseconds.
        const ptsMs = Math.floor(chunk.timestamp / 1e3);
        const [pts, ptshi] = libav.f64toi64(ptsMs);
        const packet = {
          data: chunk._libavGetData(),
          pts,
          ptshi,
          dts: pts,
          dtshi: ptshi
        };
        if (chunk.duration) {
          packet.duration = Math.floor(chunk.duration / 1e3);
          packet.durationhi = 0;
        }
        frames = yield libav.ff_decode_multi(ctxPtr, pktPtr, framePtr, [packet]);
      } catch (err) {
        this._p = this._p.then(() => {
          this._closeAudioDecoder(err);
        });
        return;
      }
      if (frames)
        this._outputAudioData(frames);
    })).catch(this._error);
  }

  // Convert libav frames into polyfill AudioData and emit them.
  _outputAudioData(frames) {
    const libav = this._libav;
    for (const frame of frames) {
      let format;
      let planar = false;
      switch (frame.format) {
        case libav.AV_SAMPLE_FMT_U8:
          format = "u8";
          break;
        case libav.AV_SAMPLE_FMT_S16:
          format = "s16";
          break;
        case libav.AV_SAMPLE_FMT_S32:
          format = "s32";
          break;
        case libav.AV_SAMPLE_FMT_FLT:
          format = "f32";
          break;
        case libav.AV_SAMPLE_FMT_U8P:
          format = "u8";
          planar = true;
          break;
        case libav.AV_SAMPLE_FMT_S16P:
          format = "s16";
          planar = true;
          break;
        case libav.AV_SAMPLE_FMT_S32P:
          format = "s32";
          planar = true;
          break;
        case libav.AV_SAMPLE_FMT_FLTP:
          format = "f32";
          planar = true;
          break;
        default:
          throw new DOMException("Unsupported libav format!", "EncodingError");
      }
      const sampleRate = frame.sample_rate;
      const frameCount = frame.nb_samples;
      const channels = frame.channels;
      const timestamp = libav.i64tof64(frame.pts, frame.ptshi) * 1e3;
      let data;
      if (planar) {
        // Concatenate the per-channel planes into one contiguous array.
        // NOTE(review): the resulting AudioData keeps the non-planar
        // format label, matching the original behavior.
        let total = 0;
        for (let p = 0; p < frame.data.length; p++)
          total += frame.data[p].length;
        data = new frame.data[0].constructor(total);
        total = 0;
        for (let p = 0; p < frame.data.length; p++) {
          const plane = frame.data[p];
          data.set(plane, total);
          total += plane.length;
        }
      } else {
        data = frame.data;
      }
      const audioData = new B({
        format,
        sampleRate,
        numberOfFrames: frameCount,
        numberOfChannels: channels,
        timestamp,
        data
      });
      this._output(audioData);
    }
  }

  /**
   * Drain any buffered frames; resolves once pending work completes.
   * @throws InvalidStateError when not configured
   */
  flush() {
    if (this.state !== "configured")
      throw new DOMException("Invalid state", "InvalidStateError");
    const done = this._p.then(() => M(this, void 0, void 0, function* () {
      if (!this._c)
        return;
      const libav = this._libav;
      const ctxPtr = this._c;
      const pktPtr = this._pkt;
      const framePtr = this._frame;
      let frames = null;
      try {
        // An empty packet list plus the final flag drains the decoder.
        frames = yield libav.ff_decode_multi(ctxPtr, pktPtr, framePtr, [], true);
      } catch (err) {
        this._p = this._p.then(() => {
          this._closeAudioDecoder(err);
        });
      }
      if (frames)
        this._outputAudioData(frames);
    }));
    this._p = done;
    return done;
  }

  /** Reset to unconfigured (spec AbortError; not reported as an error). */
  reset() {
    this._resetAudioDecoder(new DOMException("Reset", "AbortError"));
  }

  /** Permanently close the decoder. */
  close() {
    this._closeAudioDecoder(new DOMException("Close", "AbortError"));
  }

  /**
   * Check whether a decoder config is supported by actually initializing
   * (and immediately freeing) a decoder for it.
   */
  static isConfigSupported(config) {
    return M(this, void 0, void 0, function* () {
      const translated = U(config.codec, config);
      let supported = false;
      if (translated) {
        const libav = yield I();
        try {
          const [, ctxPtr, pktPtr, framePtr] = yield libav.ff_init_decoder(translated.codec);
          yield libav.ff_free_decoder(ctxPtr, pktPtr, framePtr);
          supported = true;
        } catch {
        }
        yield g(libav);
      }
      return {
        supported,
        config: Y(config, ["codec", "sampleRate", "numberOfChannels"])
      };
    });
  }
};
772
/*
 * TypeScript __awaiter helper (another per-module bundler copy): drives
 * a generator of promises and resolves with its final return value.
 */
var O = function (thisArg, args, PromiseCtor, generatorFn) {
  function adopt(value) {
    if (value instanceof PromiseCtor)
      return value;
    return new PromiseCtor(function (resolve) {
      resolve(value);
    });
  }
  return new (PromiseCtor || (PromiseCtor = Promise))(function (resolve, reject) {
    const generator = generatorFn.apply(thisArg, args || []);
    function advance(result) {
      if (result.done)
        resolve(result.value);
      else
        adopt(result.value).then(resumeNext, resumeThrow);
    }
    function resumeNext(value) {
      try {
        advance(generator.next(value));
      } catch (err) {
        reject(err);
      }
    }
    function resumeThrow(reason) {
      try {
        advance(generator.throw(reason));
      } catch (err) {
        reject(err);
      }
    }
    advance(generator.next());
  });
};
799
+ let X = class extends Q {
800
+ constructor(e) {
801
+ super(), this._outputMetadata = null, this._outputMetadataFilled = !1, this._pts = null, this._p = Promise.all([]), this._libav = null, this._codec = this._c = this._frame = this._pkt = 0, this._filter_in_ctx = this._filter_out_ctx = null, this._filter_graph = this._buffersrc_ctx = this._buffersink_ctx = 0, this._output = e.output, this._error = e.error, this.state = "unconfigured", this.encodeQueueSize = 0;
802
+ }
803
+ configure(e) {
804
+ const t = this;
805
+ if (this.state === "closed")
806
+ throw new DOMException("Encoder is closed", "InvalidStateError");
807
+ this._libav && (this._p = this._p.then(() => this._free())), this.state = "configured", this._p = this._p.then(function() {
808
+ return O(this, void 0, void 0, function* () {
809
+ const i = N(e.codec, e);
810
+ if (t._outputMetadata = { decoderConfig: {
811
+ codec: e.codec,
812
+ // Rest will be filled in when we get data
813
+ sampleRate: 0,
814
+ numberOfChannels: 0
815
+ } }, t._outputMetadataFilled = !1, !i) {
816
+ t._closeAudioEncoder(new DOMException("Unsupported codec", "NotSupportedError"));
817
+ return;
818
+ }
819
+ const s = t._libav = yield I();
820
+ let r;
821
+ [t._codec, t._c, t._frame, t._pkt, r] = yield s.ff_init_encoder(i.codec, i), t._pts = null, yield s.AVCodecContext_time_base_s(t._c, 1, i.ctx.sample_rate), t._filter_out_ctx = {
822
+ sample_rate: i.ctx.sample_rate,
823
+ sample_fmt: i.ctx.sample_fmt,
824
+ channel_layout: i.ctx.channel_layout,
825
+ frame_size: r
826
+ };
827
+ });
828
+ }).catch(this._error);
829
+ }
830
+ // Our own algorithm, close libav
831
+ _free() {
832
+ return O(this, void 0, void 0, function* () {
833
+ this._filter_graph && (yield this._libav.avfilter_graph_free_js(this._filter_graph), this._filter_in_ctx = this._filter_out_ctx = null, this._filter_graph = this._buffersrc_ctx = this._buffersink_ctx = 0), this._c && (yield this._libav.ff_free_encoder(this._c, this._frame, this._pkt), this._codec = this._c = this._frame = this._pkt = 0), this._libav && (g(this._libav), this._libav = null);
834
+ });
835
+ }
836
+ _closeAudioEncoder(e) {
837
+ this._resetAudioEncoder(e), this.state = "closed", this._p = this._p.then(() => this._free()), e.name !== "AbortError" && (this._p = this._p.then(() => {
838
+ this._error(e);
839
+ }));
840
+ }
841
+ _resetAudioEncoder(e) {
842
+ if (this.state === "closed")
843
+ throw new DOMException("Encoder closed", "InvalidStateError");
844
+ this.state = "unconfigured", this._p = this._p.then(() => this._free());
845
+ }
846
// Queue one AudioData (e) for encoding. Clones the frame synchronously
// (so the caller may close it), then on the serialized op chain:
// deinterleaves if needed, maps the sample format to libav, pushes the
// frame through the aresample filter graph, and encodes the result.
// Throws TypeError if the frame is detached, InvalidStateError if not
// configured.
encode(e) {
if (e._libavGetData() === null)
throw new TypeError("Detached");
if (this.state !== "configured")
throw new DOMException("Unconfigured", "InvalidStateError");
const t = e.clone();
this.encodeQueueSize++, this._p = this._p.then(() => O(this, void 0, void 0, function* () {
const i = this._libav, s = this._c, r = this._pkt, a = this._frame;
let n = null;
this.encodeQueueSize--, this.dispatchEvent(new CustomEvent("dequeue"));
try {
let d = t._libavGetData();
const l = t.numberOfFrames;
// Non-interleaved (planar) input: split the flat buffer into one
// subarray per channel. F presumably tests "is interleaved" —
// TODO confirm against F's definition elsewhere in the bundle.
if (!F(t.format)) {
let x = [];
for (let E = 0; E < t.numberOfChannels; E++)
x.push(d.subarray(E * l, (E + 1) * l));
d = x;
}
// Map WebCodecs AudioSampleFormat to libav AV_SAMPLE_FMT_*.
let c;
switch (t.format) {
case "u8":
c = i.AV_SAMPLE_FMT_U8;
break;
case "s16":
c = i.AV_SAMPLE_FMT_S16;
break;
case "s32":
c = i.AV_SAMPLE_FMT_S32;
break;
case "f32":
c = i.AV_SAMPLE_FMT_FLT;
break;
case "u8-planar":
c = i.AV_SAMPLE_FMT_U8P;
break;
case "s16-planar":
c = i.AV_SAMPLE_FMT_S16P;
break;
case "s32-planar":
c = i.AV_SAMPLE_FMT_S32P;
break;
case "f32-planar":
c = i.AV_SAMPLE_FMT_FLTP;
break;
default:
throw new TypeError("Invalid AudioSampleFormat");
}
// Timestamp: microseconds -> milliseconds (time_base 1/1000), split
// into a 32+32-bit i64 pair. Channel layout bitmask: mono uses 4
// (presumably libav's front-center bit — confirm), otherwise the low
// numberOfChannels bits.
const h = Math.floor(t.timestamp / 1e3), [f, u] = i.f64toi64(h), _ = t.numberOfChannels, m = _ === 1 ? 4 : (1 << _) - 1, p = t.sampleRate, b = {
data: d,
format: c,
pts: f,
ptshi: u,
channel_layout: m,
sample_rate: p
};
let y = null;
// If the input format changed since the filter graph was built,
// flush the old graph: keep only full-sized frames (the encoder
// requires frame_size samples), encode them, then tear it down so a
// fresh graph is built below.
if (this._filter_in_ctx) {
const x = this._filter_in_ctx;
if (x.sample_fmt !== b.format || x.channel_layout !== b.channel_layout || x.sample_rate !== b.sample_rate) {
let E = yield this._filter([], !0);
E = E.filter((k) => {
let K;
return k.data[0].length ? K = k.data[0].length : K = k.data.length / k.channels, K === this._filter_out_ctx.frame_size;
}), E.length && (y = yield i.ff_encode_multi(s, a, r, E)), yield i.avfilter_graph_free_js(this._filter_graph), this._filter_in_ctx = null, this._filter_graph = this._buffersrc_ctx = this._buffersink_ctx = 0;
}
}
// Lazily (re)build the aresample graph converting from the input
// format to the encoder's required output format.
if (!this._filter_graph) {
const x = this._filter_in_ctx = {
sample_rate: b.sample_rate,
sample_fmt: b.format,
channel_layout: b.channel_layout
};
[this._filter_graph, this._buffersrc_ctx, this._buffersink_ctx] = yield i.ff_init_filter_graph("aresample", x, this._filter_out_ctx);
}
const v = yield this._filter([b]);
// Encode; prepend any packets produced by the format-change flush,
// and fill output metadata from the first filtered frame once.
n = yield i.ff_encode_multi(s, a, r, v), y && (n = y.concat(n)), n.length && !this._outputMetadataFilled && v && v.length && (yield this._getOutputMetadata(v[0]));
} catch (d) {
this._p = this._p.then(() => {
this._closeAudioEncoder(d);
});
}
n && this._outputEncodedAudioChunks(n);
})).catch(this._error);
}
931
// Internal: Run frames `e` through the aresample filter graph; `t`
// flushes the graph when true. Output timestamps are rewritten into a
// continuous sample counter (this._pts, seeded from the first input
// frame's pts) so downstream pts are in sample units.
_filter(e, t = !1) {
return O(this, void 0, void 0, function* () {
e.length && this._pts === null && (this._pts = e[0].pts || 0);
const i = yield this._libav.ff_filter_multi(this._buffersrc_ctx, this._buffersink_ctx, this._frame, e, t);
for (const s of i)
s.pts = this._pts, s.ptshi = 0, this._pts += s.nb_samples;
return i;
});
}
941
// Internal: Fill this._outputMetadata.decoderConfig from the first
// encoded frame `e`: sample rate, channel count, and — when libav
// produced codec extradata — the decoder `description` bytes. Marks
// metadata as filled so it is only done once per configuration.
_getOutputMetadata(e) {
return O(this, void 0, void 0, function* () {
const t = this._libav, i = this._c, s = yield t.AVCodecContext_extradata(i), r = yield t.AVCodecContext_extradata_size(i);
let a = null;
s && r && (a = yield t.copyout_u8(s, r)), this._outputMetadata.decoderConfig.sampleRate = e.sample_rate, this._outputMetadata.decoderConfig.numberOfChannels = e.channels, a && (this._outputMetadata.decoderConfig.description = a), this._outputMetadataFilled = !0;
});
}
949
// Deliver encoded libav packets `e` to the user's output callback as
// EncodedAudioChunk instances (T). pts/duration arrive in sample units
// (see _filter) and are converted to microseconds via the output sample
// rate; flag bit 0 marks a key chunk. Metadata is attached only once
// _getOutputMetadata has run.
_outputEncodedAudioChunks(e) {
const t = this._libav, i = this._filter_out_ctx.sample_rate;
for (const s of e) {
const r = s.data, a = s.flags & 1 ? "key" : "delta";
let n = t.i64tof64(s.pts, s.ptshi);
n = Math.floor(n / i * 1e6);
let d;
typeof s.duration < "u" && (d = t.i64tof64(s.duration, s.durationhi || 0), d = Math.floor(d / i * 1e6));
const l = new T({
data: r,
type: a,
timestamp: n,
duration: d
});
this._outputMetadataFilled ? this._output(l, this._outputMetadata || void 0) : this._output(l);
}
}
966
// Drain the encoder: flush the filter graph (if built) and the codec,
// emitting any remaining chunks. Returns a promise that settles when
// the drain completes; that promise also becomes the new head of the
// op chain. Throws InvalidStateError if not configured.
flush() {
if (this.state !== "configured")
throw new DOMException("Invalid state", "InvalidStateError");
const e = this._p.then(() => O(this, void 0, void 0, function* () {
if (!this._c)
return;
const t = this._libav, i = this._c, s = this._frame, r = this._pkt, a = this._buffersrc_ctx;
// Bare property read left by minification; has no effect.
this._buffersink_ctx;
let n = null;
try {
let d = null;
// Flush the filter graph first, then the encoder itself (final
// `!0` argument requests the codec-level flush).
a && (d = yield this._filter([], !0)), n = yield t.ff_encode_multi(i, s, r, d || [], !0), !this._outputMetadataFilled && d && d.length && (yield this._getOutputMetadata(d[0]));
} catch (d) {
this._p = this._p.then(() => {
this._closeAudioEncoder(d);
});
}
n && this._outputEncodedAudioChunks(n);
}));
return this._p = e, e;
}
987
+ reset() {
988
+ this._resetAudioEncoder(new DOMException("Reset", "AbortError"));
989
+ }
990
+ close() {
991
+ this._closeAudioEncoder(new DOMException("Close", "AbortError"));
992
+ }
993
// Static support probe: translate the config via N(), then attempt a
// real ff_init_encoder in a scratch libav instance; success means
// supported. Returns { supported, config } where config is the input
// filtered (via Y) to the recognized keys. Probe failures are
// intentionally swallowed — they simply mean "unsupported".
static isConfigSupported(e) {
return O(this, void 0, void 0, function* () {
const t = N(e.codec, e);
let i = !1;
if (t) {
const s = yield I();
try {
const [, r, a, n] = yield s.ff_init_encoder(t.codec, t);
yield s.ff_free_encoder(r, a, n), i = !0;
} catch {
}
yield g(s);
}
return {
supported: i,
config: Y(e, ["codec", "sampleRate", "numberOfChannels", "bitrate"])
};
});
}
1012
+ };
1013
// Alias for the chunk class `T` (minified name) — presumably re-exported
// under this binding elsewhere in the bundle; verify against exports.
const H = T;
1014
// Promise driver for generator-based async functions (the TypeScript
// __awaiter helper): instantiates generator function `i` with `this`=o
// and args `e`, resolves each yielded value via promise constructor `t`
// (defaulting to Promise), and settles the returned promise with the
// generator's return value or thrown error.
var Ae = function(o, e, t, i) {
  function toPromise(value) {
    if (value instanceof t) return value;
    return new t(function(res) {
      res(value);
    });
  }
  return new (t || (t = Promise))(function(resolve, reject) {
    function step(result) {
      if (result.done) {
        resolve(result.value);
        return;
      }
      toPromise(result.value).then(onFulfilled, onRejected);
    }
    function onFulfilled(value) {
      try {
        step(i.next(value));
      } catch (err) {
        reject(err);
      }
    }
    function onRejected(reason) {
      try {
        step(i.throw(reason));
      } catch (err) {
        reject(err);
      }
    }
    i = i.apply(o, e || []);
    step(i.next());
  });
};
1041
/* Polyfill VideoFrame (minified; local class name `L`, bound to `S`).
 * Mirrors the WebCodecs VideoFrame API: construction from raw buffers,
 * other frames, <video> elements, or canvas-drawable sources; plane
 * layout / allocation computation; copyTo / clone / close; conversion
 * to and from a native VideoFrame. `V` is a lazily created scratch
 * canvas shared by all canvas-sourced constructions. */
let V = null, S = class L {
// e: buffer | VideoFrame (polyfill or native) | HTMLVideoElement |
//    canvas-drawable source; t: VideoFrameInit-style options.
constructor(e, t) {
if (this.format = "I420", this.codedWidth = 0, this.codedHeight = 0, this.codedRect = null, this.visibleRect = null, this.displayWidth = 0, this.displayHeight = 0, this.timestamp = 0, this._layout = null, this._data = null, this._nonSquarePixels = !1, this._sar_num = 1, this._sar_den = 1, e instanceof ArrayBuffer || e.buffer instanceof ArrayBuffer)
this._constructBuffer(e, t);
else if (e instanceof L || globalThis.VideoFrame && e instanceof globalThis.VideoFrame) {
// Copy the other frame's pixels out, then construct from that buffer,
// transferring it to avoid a second copy.
const i = new Uint8Array(e.allocationSize());
e.copyTo(i), this._constructBuffer(i, {
transfer: [i.buffer],
// 1. Let format be otherFrame.format.
/* 2. FIXME: If init.alpha is discard, assign
* otherFrame.format's equivalent opaque format format. */
format: e.format,
/* 3. Let validInit be the result of running the Validate
* VideoFrameInit algorithm with format and otherFrame’s
* [[coded width]] and [[coded height]]. */
// 4. If validInit is false, throw a TypeError.
/* 7. Assign the following attributes from otherFrame to frame:
* codedWidth, codedHeight, colorSpace. */
codedHeight: e.codedHeight,
codedWidth: e.codedWidth,
colorSpace: e.colorSpace,
/* 8. Let defaultVisibleRect be the result of performing the
* getter steps for visibleRect on otherFrame. */
/* 9. Let defaultDisplayWidth, and defaultDisplayHeight be
* otherFrame’s [[display width]], and [[display height]]
* respectively. */
/* 10. Run the Initialize Visible Rect and Display Size
* algorithm with init, frame, defaultVisibleRect,
* defaultDisplayWidth, and defaultDisplayHeight. */
visibleRect: t?.visibleRect || e.visibleRect,
displayHeight: t?.displayHeight || e.displayHeight,
displayWidth: t?.displayWidth || e.displayWidth,
/* 11. If duration exists in init, assign it to frame’s
* [[duration]]. Otherwise, assign otherFrame.duration to
* frame’s [[duration]]. */
duration: t?.duration || e.duration,
/* 12. If timestamp exists in init, assign it to frame’s
* [[timestamp]]. Otherwise, assign otherFrame’s timestamp to
* frame’s [[timestamp]]. */
timestamp: t?.timestamp || e.timestamp,
/* Assign the result of calling Copy VideoFrame metadata with
* init’s metadata to frame.[[metadata]]. */
// NOTE(review): JSON round-trip of t?.metadata yields undefined
// when t is absent, but throws nothing; verify metadata values
// survive (functions/undefined are dropped by JSON).
metadata: JSON.parse(JSON.stringify(t?.metadata))
});
} else if (e instanceof HTMLVideoElement) {
// Per spec, the element must have current frame data available.
if (e.readyState === HTMLVideoElement.prototype.HAVE_NOTHING || e.readyState === HTMLVideoElement.prototype.HAVE_METADATA)
throw new DOMException("Video is not ready for reading frames", "InvalidStateError");
if (e.networkState === e.NETWORK_EMPTY)
throw new DOMException("Video network state is empty", "InvalidStateError");
// Default the timestamp to the element's playback position (µs).
this._constructCanvas(e, Object.assign(Object.assign({}, t), { timestamp: t?.timestamp || e.currentTime * 1e6 }));
} else
this._constructCanvas(e, t);
}
// Rasterize a drawable source into the shared scratch canvas `V` and
// construct an RGBA frame from the resulting ImageData.
_constructCanvas(e, t) {
let i = 0, s = 0;
// Dimensions: image (naturalWidth) > video (videoWidth) > generic.
if (e.naturalWidth ? (i = e.naturalWidth, s = e.naturalHeight) : e.videoWidth ? (i = e.videoWidth, s = e.videoHeight) : e.width && (i = e.width, s = e.height), !i || !s)
throw new DOMException("Could not determine dimensions", "InvalidStateError");
// Prefer OffscreenCanvas; fall back to a hidden DOM canvas.
V === null && (typeof OffscreenCanvas < "u" ? V = new OffscreenCanvas(i, s) : (V = document.createElement("canvas"), V.style.display = "none", document.body.appendChild(V))), V.width = i, V.height = s;
const r = { desynchronized: !0, willReadFrequently: !0 }, a = V.getContext("2d", r);
a.clearRect(0, 0, i, s), a.drawImage(e, 0, 0), this._constructBuffer(a.getImageData(0, 0, i, s).data, {
format: "RGBA",
codedWidth: i,
codedHeight: s,
timestamp: t?.timestamp || 0,
duration: t?.duration || 0,
layout: [{ offset: 0, stride: i * 4 }],
displayWidth: t?.displayWidth || i,
displayHeight: t?.displayHeight || s
});
}
// Core constructor: validate init, compute/accept the plane layout,
// adopt or copy the pixel data, and derive visible/display geometry
// and the sample aspect ratio.
_constructBuffer(e, t) {
L._checkValidVideoFrameBufferInit(t);
const i = new DOMRect(0, 0, t.codedWidth, t.codedHeight);
let s;
t.visibleRect && (s = DOMRect.fromRect(t.visibleRect)), this.codedWidth = t.codedWidth, this.codedHeight = t.codedHeight;
const r = this._parseVisibleRect(i, s || null);
let a;
t.layout && (t.layout instanceof Array ? a = t.layout : a = Array.from(t.layout)), this.format = t.format;
const n = this._computeLayoutAndAllocationSize(r, a || null);
if (e.byteLength < n.allocationSize)
throw new TypeError("data is too small for layout");
// d: whether the caller transferred ownership of the backing buffer
// (in which case we can adopt it without copying).
let d = !1;
if (t.transfer) {
let f;
e.buffer ? f = e.buffer : f = e;
let u;
t.transfer instanceof Array ? u = t.transfer : u = Array.from(t.transfer);
for (const _ of u)
if (_ === f) {
d = !0;
break;
}
}
const l = t.format;
// Use the provided per-plane layout, or synthesize a tightly packed
// one from per-plane sample sizes.
if (t.layout)
t.layout instanceof Array ? this._layout = t.layout : this._layout = Array.from(t.layout);
else {
const f = P(l), u = [];
let _ = 0;
for (let m = 0; m < f; m++) {
const p = te(l, m), b = z(l, m), y = ~~(this.codedWidth / p);
u.push({ offset: _, stride: y }), _ += y * ~~(this.codedHeight / b);
}
this._layout = u;
}
// Not transferred: copy out just the [min offset, max extent) span
// and rebase plane offsets to zero.
if (this._data = new Uint8Array(e.buffer || e, e.byteOffset || 0), !d) {
const f = P(l);
let u = this._layout, _ = 1 / 0, m = 0;
for (let p = 0; p < f; p++) {
const b = u[p];
let y = b.offset;
y < _ && (_ = y);
const v = z(l, p);
y += b.stride * ~~(this.codedHeight / v), y > m && (m = y);
}
_ !== 0 && (u = this._layout = u.map((p) => ({
offset: p.offset - _,
stride: p.stride
}))), this._data = this._data.slice(_, m);
}
// Geometry: coded rect, visible rect, display size, and derived
// sample aspect ratio (non-square pixels when display != visible).
const c = t.codedWidth, h = t.codedHeight;
r.left, r.top, this.codedRect = new DOMRect(0, 0, c, h), this.visibleRect = r, t.visibleRect ? this.visibleRect = DOMRect.fromRect(t.visibleRect) : this.visibleRect = new DOMRect(0, 0, c, h), typeof t.displayWidth == "number" ? this.displayWidth = t.displayWidth : this.displayWidth = this.visibleRect.width, typeof t.displayHeight == "number" ? this.displayHeight = t.displayHeight : this.displayHeight = this.visibleRect.height, this.displayWidth !== this.visibleRect.width || this.displayHeight !== this.visibleRect.height ? (this._nonSquarePixels = !0, this._sar_num = this.displayWidth * this.visibleRect.width, this._sar_den = this.displayHeight * this.visibleRect.height) : (this._nonSquarePixels = !1, this._sar_num = this._sar_den = 1), this.timestamp = t.timestamp, this.duration = t.duration;
}
/**
* Convert a polyfill VideoFrame to a native VideoFrame.
* @param opts Conversion options
*/
toNative(e = {}) {
const t = new globalThis.VideoFrame(this._data, {
layout: this._layout,
format: this.format,
codedWidth: this.codedWidth,
codedHeight: this.codedHeight,
visibleRect: this.visibleRect,
displayWidth: this.displayWidth,
displayHeight: this.displayHeight,
duration: this.duration,
timestamp: this.timestamp,
transfer: e.transfer ? [this._data.buffer] : []
});
return e.transfer && this.close(), t;
}
/**
* Convert a native VideoFrame to a polyfill VideoFrame. WARNING: Inefficient,
* as the data cannot be transferred out.
* @param from VideoFrame to copy in
*/
static fromNative(e) {
const t = e, i = new Uint8Array(t.allocationSize());
return t.copyTo(i), new L(i, {
format: t.format,
codedWidth: t.codedWidth,
codedHeight: t.codedHeight,
visibleRect: t.visibleRect,
displayWidth: t.displayWidth,
displayHeight: t.displayHeight,
duration: t.duration,
timestamp: t.timestamp
});
}
// Internal
// Raw pixel bytes (null once the frame is closed/detached).
_libavGetData() {
return this._data;
}
// Per-plane { offset, stride } layout.
_libavGetLayout() {
return this._layout;
}
// Validate a VideoFrameBufferInit: coded dimensions, visible rect
// bounds, and display dimensions.
static _checkValidVideoFrameBufferInit(e) {
if (!e.codedWidth || !e.codedHeight)
throw new TypeError("Invalid coded dimensions");
if (e.visibleRect) {
const t = DOMRect.fromRect(e.visibleRect);
if (t.x < 0 || !Number.isFinite(t.x) || t.y < 0 || !Number.isFinite(t.y) || t.width < 0 || !Number.isFinite(t.width) || t.height < 0 || !Number.isFinite(t.height))
throw new TypeError("Invalid visible rectangle");
if (t.y + t.height > e.codedHeight)
throw new TypeError("Visible rectangle outside of coded height");
if (t.x + t.width > e.codedWidth)
throw new TypeError("Visible rectangle outside of coded width");
// NOTE(review): the second clause (`!displayWidth && !displayHeight`)
// rejects inits that omit BOTH display dimensions, which should be
// valid (defaults apply); it likely was meant to be
// `!displayWidth && displayHeight` (one without the other). Confirm
// against the upstream WebCodecs polyfill before changing.
if (e.displayWidth && !e.displayHeight || !e.displayWidth && !e.displayHeight || e.displayWidth === 0 || e.displayHeight === 0)
throw new TypeError("Invalid display dimensions");
}
}
// Spec surface only: this polyfill carries no per-frame metadata.
metadata() {
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
return null;
}
// Bytes required by copyTo for the given options.
allocationSize(e = {}) {
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
if (this.format === null)
throw new DOMException("Not supported", "NotSupportedError");
return this._parseVideoFrameCopyToOptions(e).allocationSize;
}
// Normalize copyTo options into computed plane layouts + total size.
_parseVideoFrameCopyToOptions(e) {
const t = this.visibleRect;
let i = e.rect ? new DOMRect(e.rect.x, e.rect.y, e.rect.width, e.rect.height) : null;
const s = this._parseVisibleRect(t, i);
let r = null;
return e.layout && (e.layout instanceof Array ? r = e.layout : r = Array.from(e.layout)), this._computeLayoutAndAllocationSize(s, r);
}
// Validate an override rect `t` against coded bounds and subsample
// alignment; fall back to default rect `e`.
_parseVisibleRect(e, t) {
let i = e;
if (t) {
if (t.width === 0 || t.height === 0)
throw new TypeError("Invalid rectangle");
if (t.x + t.width > this.codedWidth)
throw new TypeError("Invalid rectangle");
if (t.y + t.height > this.codedHeight)
throw new TypeError("Invalid rectangle");
i = t;
}
if (!this._verifyRectOffsetAlignment(i))
throw new TypeError("Invalid alignment");
return i;
}
// Compute per-plane source/destination layout for rect `e` (using the
// caller-supplied destination layout `t` when given) plus the minimal
// destination allocation size. Follows the spec's numbered algorithm.
_computeLayoutAndAllocationSize(e, t) {
let i = P(this.format);
if (t && t.length !== i)
throw new TypeError("Invalid layout");
let s = 0, r = [], a = [], n = 0;
for (; n < i; ) {
const l = Ee(this.format, n), c = te(this.format, n), h = z(this.format, n), f = {
destinationOffset: 0,
destinationStride: 0,
/* 6. Set computedLayout’s sourceTop to the result of the division
* of truncated parsedRect.y by sampleHeight, rounded up to the
* nearest integer. */
sourceTop: Math.ceil(~~e.y / h),
/* 7. Set computedLayout’s sourceHeight to the result of the
* division of truncated parsedRect.height by sampleHeight,
* rounded up to the nearest integer. */
sourceHeight: Math.ceil(~~e.height / h),
/* 8. Set computedLayout’s sourceLeftBytes to the result of the
* integer division of truncated parsedRect.x by sampleWidth,
* multiplied by sampleBytes. */
sourceLeftBytes: ~~(e.x / c * l),
/* 9. Set computedLayout’s sourceWidthBytes to the result of the
* integer division of truncated parsedRect.width by
* sampleHeight, multiplied by sampleBytes. */
sourceWidthBytes: ~~(e.width / c * l)
};
if (t) {
const p = t[n];
if (p.stride < f.sourceWidthBytes)
throw new TypeError("Invalid stride");
f.destinationOffset = p.offset, f.destinationStride = p.stride;
} else
f.destinationOffset = s, f.destinationStride = f.sourceWidthBytes;
const u = f.destinationStride * f.sourceHeight, _ = u + f.destinationOffset;
if (u >= 4294967296 || _ >= 4294967296)
throw new TypeError("Plane too large");
a.push(_), _ > s && (s = _);
// Reject destination layouts whose planes overlap.
let m = 0;
for (; m < n; ) {
const p = r[m];
if (!(_ <= p.destinationOffset || a[m] <= f.destinationOffset)) throw new TypeError("Invalid plane layout");
m++;
}
r.push(f), n++;
}
return {
// 1. Assign computedLayouts to computedLayouts.
computedLayouts: r,
// 2. Assign minAllocationSize to allocationSize.
allocationSize: s
};
}
// A rect is aligned when its origin falls on a sample boundary in
// every plane of the current format.
_verifyRectOffsetAlignment(e) {
if (!this.format)
return !0;
let t = 0;
const i = P(this.format);
for (; t < i; ) {
const s = te(this.format, t), r = z(this.format, t), a = e.x / s;
if (a !== ~~a)
return !1;
const n = e.y / r;
if (n !== ~~n)
return !1;
t++;
}
return !0;
}
// Copy the (optionally sub-rect of the) frame into `e`, plane by
// plane, per the computed layouts. Resolves with the per-plane
// destination layouts actually used.
copyTo(e, t = {}) {
return Ae(this, void 0, void 0, function* () {
const i = new Uint8Array(e.buffer || e, e.byteOffset || 0);
if (this._data === null)
throw new DOMException("Detached", "InvalidStateError");
if (!this.format)
throw new DOMException("No format", "NotSupportedError");
const s = this._parseVideoFrameCopyToOptions(t);
if (e.byteLength < s.allocationSize)
throw new TypeError("Insufficient space");
let r = [];
{
// Bare call left by minification; value unused.
P(this.format);
let a = 0;
for (; a < s.computedLayouts.length; ) {
const n = this._layout[a].stride, d = s.computedLayouts[a];
let l = d.sourceTop * n;
l += d.sourceLeftBytes;
let c = d.destinationOffset;
const h = d.sourceWidthBytes, f = {
offset: d.destinationOffset,
stride: d.destinationStride
};
let u = 0;
// Row-by-row copy: advance source by the plane stride,
// destination by the computed destination stride.
for (; u < d.sourceHeight; )
i.set(this._data.subarray(l, l + h), c), l += n, c += d.destinationStride, u++;
a++, r.push(f);
}
}
return r;
});
}
// NOTE(review): the clone lists its own buffer in `transfer`, so the
// new frame adopts (shares) the same backing storage instead of
// copying — verify this zero-copy sharing is intended for clone().
clone() {
return new L(this._data, {
format: this.format,
codedWidth: this.codedWidth,
codedHeight: this.codedHeight,
timestamp: this.timestamp,
duration: this.duration,
layout: this._layout,
transfer: [this._data.buffer]
});
}
// Detach the frame; subsequent data access throws InvalidStateError.
close() {
this._data = null;
}
};
1372
// Map a WebCodecs VideoPixelFormat string to the corresponding libav
// AV_PIX_FMT_* value.
// o: libav instance (supplies the named AV_PIX_FMT_* constants); bare
//    numeric literals are used for formats the wrapper does not export
//    by name.
// e: VideoPixelFormat (e.g. "I420", "RGBA").
// Returns the libav pixel format number.
// Throws TypeError for unknown formats and for "I420AP12", which libav
// does not support.
function ae(o, e) {
  let t = o.AV_PIX_FMT_RGBA;
  switch (e) {
    case "I420":
      t = o.AV_PIX_FMT_YUV420P;
      break;
    case "I420P10":
      t = 62;
      break;
    case "I420P12":
      t = 123;
      break;
    case "I420A":
      t = o.AV_PIX_FMT_YUVA420P;
      break;
    case "I420AP10":
      t = 87;
      break;
    case "I420AP12":
      throw new TypeError("YUV420P12 is not supported by libav");
    case "I422":
      t = o.AV_PIX_FMT_YUV422P;
      break;
    case "I422P10":
      t = 64;
      break;
    case "I422P12":
      t = 127;
      break;
    case "I422A":
      t = 78;
      break;
    case "I422AP10":
      t = 89;
      break;
    // BUG FIX: this case was previously a duplicate "I422AP10" label,
    // which made it dead code and caused "I422AP12" to hit the default
    // and throw. 186 matches the decoder-side mapping (186 -> "I422AP12"
    // in _outputVideoFrames).
    case "I422AP12":
      t = 186;
      break;
    case "I444":
      t = o.AV_PIX_FMT_YUV444P;
      break;
    case "I444P10":
      t = 68;
      break;
    case "I444P12":
      t = 131;
      break;
    case "I444A":
      t = 79;
      break;
    case "I444AP10":
      t = 91;
      break;
    case "I444AP12":
      t = 188;
      break;
    case "NV12":
      t = o.AV_PIX_FMT_NV12;
      break;
    case "RGBA":
      t = o.AV_PIX_FMT_RGBA;
      break;
    case "RGBX":
      t = 119;
      break;
    case "BGRA":
      t = o.AV_PIX_FMT_BGRA;
      break;
    case "BGRX":
      t = 121;
      break;
    default:
      throw new TypeError("Invalid VideoPixelFormat");
  }
  return t;
}
1448
// Number of planes for a VideoPixelFormat: 3 for planar YUV, 4 with an
// alpha plane, 2 for NV12 (Y + interleaved UV), 1 for packed RGB
// formats. Throws NotSupportedError for anything else.
function P(o) {
  const threePlane = new Set([
    "I420", "I420P10", "I420P12",
    "I422", "I422P10", "I422P12",
    "I444", "I444P10", "I444P12"
  ]);
  const fourPlane = new Set([
    "I420A", "I420AP10", "I420AP12",
    "I422A", "I422AP10", "I422AP12",
    "I444A", "I444AP10", "I444AP12"
  ]);
  const onePlane = new Set(["RGBA", "RGBX", "BGRA", "BGRX"]);
  if (threePlane.has(o))
    return 3;
  if (fourPlane.has(o))
    return 4;
  if (o === "NV12")
    return 2;
  if (onePlane.has(o))
    return 1;
  throw new DOMException("Unsupported video pixel format", "NotSupportedError");
}
1481
// Bytes per sample of plane `e` of VideoPixelFormat `o`: 1 for 8-bit
// planar formats, 2 for 10/12-bit planar formats, 4 for packed RGB.
// NV12's UV plane (index 1) packs two bytes per sample. Throws
// NotSupportedError for unknown formats.
function Ee(o, e) {
  if (o === "NV12")
    return e === 1 ? 2 : 1;
  const oneByte = new Set(["I420", "I420A", "I422", "I422A", "I444", "I444A"]);
  if (oneByte.has(o))
    return 1;
  const twoByte = new Set([
    "I420P10", "I420AP10", "I422P10", "I422AP10", "I444P10", "I444AP10",
    "I420P12", "I420AP12", "I422P12", "I422AP12", "I444P12", "I444AP12"
  ]);
  if (twoByte.has(o))
    return 2;
  const fourByte = new Set(["RGBA", "RGBX", "BGRA", "BGRX"]);
  if (fourByte.has(o))
    return 4;
  throw new DOMException("Unsupported video pixel format", "NotSupportedError");
}
1514
// Horizontal subsampling factor for plane `e` of format `o`. The luma
// plane (0) and alpha plane (3) are never subsampled. 4:2:0 and 4:2:2
// families (and NV12's UV plane) halve chroma horizontally; 4:4:4 and
// packed RGB do not. Throws NotSupportedError for unknown formats on
// a chroma plane index.
function te(o, e) {
  if (e === 0 || e === 3)
    return 1;
  const halvedChroma = new Set([
    "I420", "I420P10", "I420P12", "I420A", "I420AP10", "I420AP12",
    "I422", "I422P10", "I422P12", "I422A", "I422AP10", "I422AP12",
    "NV12"
  ]);
  if (halvedChroma.has(o))
    return 2;
  const fullChroma = new Set([
    "I444", "I444P10", "I444P12", "I444A", "I444AP10", "I444AP12",
    "RGBA", "RGBX", "BGRA", "BGRX"
  ]);
  if (fullChroma.has(o))
    return 1;
  throw new DOMException("Unsupported video pixel format", "NotSupportedError");
}
1549
// Vertical subsampling factor for plane `e` of format `o`. The luma
// plane (0) and alpha plane (3) are never subsampled. Only the 4:2:0
// family (and NV12's UV plane) halves chroma vertically; 4:2:2, 4:4:4,
// and packed RGB do not. Throws NotSupportedError for unknown formats
// on a chroma plane index.
function z(o, e) {
  if (e === 0 || e === 3)
    return 1;
  const halvedChroma = new Set([
    "I420", "I420P10", "I420P12", "I420A", "I420AP10", "I420AP12",
    "NV12"
  ]);
  if (halvedChroma.has(o))
    return 2;
  const fullChroma = new Set([
    "I422", "I422P10", "I422P12", "I422A", "I422AP10", "I422AP12",
    "I444", "I444P10", "I444P12", "I444A", "I444AP10", "I444AP12",
    "RGBA", "RGBX", "BGRA", "BGRX"
  ]);
  if (fullChroma.has(o))
    return 1;
  throw new DOMException("Unsupported video pixel format", "NotSupportedError");
}
1584
// Generator-to-promise driver (TypeScript __awaiter helper), duplicated
// per module chunk by the bundler: runs generator function `i` with
// `this`=o / args `e`, awaiting each yielded value via the promise
// constructor `t` (defaults to Promise).
var R = function(o, e, t, i) {
  const PromiseCtor = t || (t = Promise);
  const wrap = (value) => value instanceof t ? value : new t((res) => {
    res(value);
  });
  return new PromiseCtor((resolve, reject) => {
    const advance = (result) => {
      if (result.done) {
        resolve(result.value);
      } else {
        wrap(result.value).then(resumeNext, resumeThrow);
      }
    };
    function resumeNext(value) {
      try {
        advance(i.next(value));
      } catch (err) {
        reject(err);
      }
    }
    function resumeThrow(reason) {
      try {
        advance(i.throw(reason));
      } catch (err) {
        reject(err);
      }
    }
    i = i.apply(o, e || []);
    advance(i.next());
  });
};
1611
/* Polyfill VideoDecoder backed by libav.js. All async work is
 * serialized on this._p (the op chain); errors close the decoder and
 * are reported via the user-supplied error callback. Q is presumably
 * the shared EventTarget-style base class — confirm elsewhere in the
 * bundle. */
let q = class extends Q {
// e: { output, error } callbacks, per the WebCodecs VideoDecoderInit.
constructor(e) {
super(), this._p = Promise.all([]), this._libav = null, this._codec = this._c = this._pkt = this._frame = 0, this._output = e.output, this._error = e.error, this.state = "unconfigured", this.decodeQueueSize = 0;
}
// Queue (re)configuration: translate the config via U(), spin up a
// libav instance, init the decoder, and set time_base to 1/1000 (ms).
configure(e) {
if (this.state === "closed")
throw new DOMException("Decoder is closed", "InvalidStateError");
this._libav && (this._p = this._p.then(() => this._free())), this.state = "configured", this._p = this._p.then(() => R(this, void 0, void 0, function* () {
const t = U(e.codec, e);
if (!t) {
this._closeVideoDecoder(new DOMException("Unsupported codec", "NotSupportedError"));
return;
}
const i = this._libav = yield I();
[this._codec, this._c, this._pkt, this._frame] = yield i.ff_init_decoder(t.codec), yield i.AVCodecContext_time_base_s(this._c, 1, 1e3);
})).catch(this._error);
}
// Our own algorithm, close libav
_free() {
return R(this, void 0, void 0, function* () {
this._c && (yield this._libav.ff_free_decoder(this._c, this._pkt, this._frame), this._codec = this._c = this._pkt = this._frame = 0), this._libav && (g(this._libav), this._libav = null);
});
}
// Permanently close: reset, mark closed, queue teardown, and report
// the error unless it was an intentional abort.
_closeVideoDecoder(e) {
this._resetVideoDecoder(e), this.state = "closed", this._p = this._p.then(() => this._free()), e.name !== "AbortError" && (this._p = this._p.then(() => {
this._error(e);
}));
}
// Return to "unconfigured" and queue resource teardown.
_resetVideoDecoder(e) {
if (this.state === "closed")
throw new DOMException("Decoder closed", "InvalidStateError");
this.state = "unconfigured", this._p = this._p.then(() => this._free());
}
// Queue one EncodedVideoChunk for decoding. Timestamps are converted
// from microseconds to the 1/1000 time base and split into i64 halves.
decode(e) {
const t = this;
if (this.state !== "configured")
throw new DOMException("Unconfigured", "InvalidStateError");
this.decodeQueueSize++, this._p = this._p.then(function() {
return R(this, void 0, void 0, function* () {
const i = t._libav, s = t._c, r = t._pkt, a = t._frame;
let n = null;
t.decodeQueueSize--, t.dispatchEvent(new CustomEvent("dequeue"));
try {
const d = Math.floor(e.timestamp / 1e3), [l, c] = i.f64toi64(d), h = {
data: e._libavGetData(),
pts: l,
ptshi: c,
dts: l,
dtshi: c
};
e.duration && (h.duration = Math.floor(e.duration / 1e3), h.durationhi = 0), n = yield i.ff_decode_multi(s, r, a, [h]);
} catch (d) {
t._p = t._p.then(() => {
t._closeVideoDecoder(d);
});
}
n && t._outputVideoFrames(n);
});
}).catch(this._error);
}
// Convert decoded libav frames into polyfill VideoFrames (S) and hand
// them to the output callback. Numeric cases are AVPixelFormat values
// the libav.js wrapper does not export by name (they mirror the
// inverse mapping in ae()).
_outputVideoFrames(e) {
const t = this._libav;
for (const i of e) {
let s;
switch (i.format) {
case t.AV_PIX_FMT_YUV420P:
s = "I420";
break;
case 62:
s = "I420P10";
break;
case 123:
s = "I420P12";
break;
case t.AV_PIX_FMT_YUVA420P:
s = "I420A";
break;
case 87:
s = "I420AP10";
break;
case t.AV_PIX_FMT_YUV422P:
s = "I422";
break;
case 64:
s = "I422P10";
break;
case 127:
s = "I422P12";
break;
case 78:
s = "I422A";
break;
case 89:
s = "I422AP10";
break;
case 186:
s = "I422AP12";
break;
case t.AV_PIX_FMT_YUV444P:
s = "I444";
break;
case 68:
s = "I444P10";
break;
case 131:
s = "I444P12";
break;
case 79:
s = "I444A";
break;
case 91:
s = "I444AP10";
break;
case 188:
s = "I444AP12";
break;
case t.AV_PIX_FMT_NV12:
s = "NV12";
break;
case t.AV_PIX_FMT_RGBA:
s = "RGBA";
break;
case 119:
s = "RGBX";
break;
case t.AV_PIX_FMT_BGRA:
s = "BGRA";
break;
case 121:
s = "BGRX";
break;
default:
throw new DOMException("Unsupported libav format!", "EncodingError");
}
// Visible rect from libav cropping info, if present.
const r = i.width, a = i.height;
let n;
i.crop ? n = new DOMRect(i.crop.left, i.crop.top, r - i.crop.left - i.crop.right, a - i.crop.top - i.crop.bottom) : n = new DOMRect(0, 0, r, a);
// Display size scaled by the sample aspect ratio (non-square pixels).
let d = r, l = a;
if (i.sample_aspect_ratio && i.sample_aspect_ratio[0]) {
const f = i.sample_aspect_ratio;
f[0] > f[1] ? d = ~~(r * f[0] / f[1]) : l = ~~(a * f[1] / f[0]);
}
// pts is in milliseconds (time_base 1/1000); convert to microseconds.
const c = t.i64tof64(i.pts, i.ptshi) * 1e3, h = new S(i.data, {
layout: i.layout,
format: s,
codedWidth: r,
codedHeight: a,
visibleRect: n,
displayWidth: d,
displayHeight: l,
timestamp: c
});
this._output(h);
}
}
// Drain the decoder: flush libav (final `!0`) and emit remaining
// frames. The returned promise also becomes the new op-chain head.
flush() {
if (this.state !== "configured")
throw new DOMException("Invalid state", "InvalidStateError");
const e = this._p.then(() => R(this, void 0, void 0, function* () {
if (!this._c)
return;
const t = this._libav, i = this._c, s = this._pkt, r = this._frame;
let a = null;
try {
a = yield t.ff_decode_multi(i, s, r, [], !0);
} catch (n) {
this._p = this._p.then(() => {
this._closeVideoDecoder(n);
});
}
a && this._outputVideoFrames(a);
}));
return this._p = e, e;
}
// Abort queued work and return to "unconfigured".
reset() {
this._resetVideoDecoder(new DOMException("Reset", "AbortError"));
}
// Permanently shut the decoder down.
close() {
this._closeVideoDecoder(new DOMException("Close", "AbortError"));
}
// Static support probe: try a real ff_init_decoder in a scratch libav
// instance; failures are swallowed and reported as unsupported.
static isConfigSupported(e) {
return R(this, void 0, void 0, function* () {
const t = U(e.codec, e);
let i = !1;
if (t) {
const s = yield I();
try {
const [, r, a, n] = yield s.ff_init_decoder(t.codec);
yield s.ff_free_decoder(r, a, n), i = !0;
} catch {
}
yield g(s);
}
return {
supported: i,
config: Y(e, ["codec", "codedWidth", "codedHeight"])
};
});
}
};
1811
// Yet another per-chunk copy of the TypeScript __awaiter helper: drives
// generator function `i` (bound to `o`, called with args `e`) to
// completion, awaiting each yielded value through promise constructor
// `t` (defaulting to Promise), and settles the returned promise with
// the generator's result.
var D = function(o, e, t, i) {
  if (!t) t = Promise;
  const ensurePromise = function(value) {
    return value instanceof t ? value : new t(function(res) {
      res(value);
    });
  };
  return new t(function(resolve, reject) {
    const pump = function(result) {
      result.done ? resolve(result.value) : ensurePromise(result.value).then(stepValue, stepError);
    };
    const stepValue = function(value) {
      let result;
      try {
        result = i.next(value);
      } catch (err) {
        reject(err);
        return;
      }
      pump(result);
    };
    const stepError = function(reason) {
      let result;
      try {
        result = i.throw(reason);
      } catch (err) {
        reject(err);
        return;
      }
      pump(result);
    };
    i = i.apply(o, e || []);
    pump(i.next());
  });
};
1838
+ let j = class extends Q {
1839
+ constructor(e) {
1840
+ super(), this._extradataSet = !1, this._extradata = null, this._nonSquarePixels = !1, this._sar_num = 1, this._sar_den = 1, this._p = Promise.all([]), this._libav = null, this._codec = this._c = this._frame = this._pkt = 0, this._output = e.output, this._error = e.error, this._metadata = null, this.state = "unconfigured", this.encodeQueueSize = 0;
1841
+ }
1842
+ configure(e) {
1843
+ if (this.state === "closed")
1844
+ throw new DOMException("Encoder is closed", "InvalidStateError");
1845
+ this._libav && (this._p = this._p.then(() => this._free())), this.state = "configured", this._p = this._p.then(() => D(this, void 0, void 0, function* () {
1846
+ const t = N(e.codec, e);
1847
+ if (!t) {
1848
+ this._closeVideoEncoder(new DOMException("Unsupported codec", "NotSupportedError"));
1849
+ return;
1850
+ }
1851
+ const i = this._libav = yield I();
1852
+ this._metadata = {
1853
+ decoderConfig: {
1854
+ codec: t.codec
1855
+ }
1856
+ }, [this._codec, this._c, this._frame, this._pkt] = yield i.ff_init_encoder(t.codec, t), this._extradataSet = !1, this._extradata = null, yield i.AVCodecContext_time_base_s(this._c, 1, 1e3);
1857
+ const s = e.width, r = e.height;
1858
+ this._sws = 0, this._swsFrame = 0, this._swsOut = {
1859
+ width: s,
1860
+ height: r,
1861
+ format: t.ctx.pix_fmt
1862
+ };
1863
+ const a = e.displayWidth || s, n = e.displayHeight || r;
1864
+ a !== s || n !== r ? (this._nonSquarePixels = !0, this._sar_num = a * r, this._sar_den = n * s) : this._nonSquarePixels = !1;
1865
+ })).catch(this._error);
1866
+ }
1867
+ // Our own algorithm, close libav
1868
+ _free() {
1869
+ return D(this, void 0, void 0, function* () {
1870
+ this._sws && (yield this._libav.av_frame_free_js(this._swsFrame), yield this._libav.sws_freeContext(this._sws), this._sws = this._swsFrame = 0, this._swsIn = this._swsOut = void 0), this._c && (yield this._libav.ff_free_encoder(this._c, this._frame, this._pkt), this._codec = this._c = this._frame = this._pkt = 0), this._libav && (g(this._libav), this._libav = null);
1871
+ });
1872
+ }
1873
+ _closeVideoEncoder(e) {
1874
+ this._resetVideoEncoder(e), this.state = "closed", this._p = this._p.then(() => this._free()), e.name !== "AbortError" && (this._p = this._p.then(() => {
1875
+ this._error(e);
1876
+ }));
1877
+ }
1878
+ _resetVideoEncoder(e) {
1879
+ if (this.state === "closed")
1880
+ throw new DOMException("Encoder closed", "InvalidStateError");
1881
+ this.state = "unconfigured", this._p = this._p.then(() => this._free());
1882
+ }
1883
+ encode(e, t = {}) {
1884
+ if (e._libavGetData() === null)
1885
+ throw new TypeError("Detached");
1886
+ if (this.state !== "configured")
1887
+ throw new DOMException("Unconfigured", "InvalidStateError");
1888
+ const i = e.clone();
1889
+ this.encodeQueueSize++, this._p = this._p.then(() => D(this, void 0, void 0, function* () {
1890
+ const s = this._libav, r = this._c, a = this._pkt, n = this._frame, d = this._swsOut;
1891
+ let l = null;
1892
+ this.encodeQueueSize--, this.dispatchEvent(new CustomEvent("dequeue"));
1893
+ try {
1894
+ const c = ae(s, i.format), h = i._libavGetData(), f = i._libavGetLayout(), u = Math.floor(i.timestamp / 1e3), [_, m] = s.f64toi64(u), p = {
1895
+ data: h,
1896
+ layout: f,
1897
+ format: c,
1898
+ pts: _,
1899
+ ptshi: m,
1900
+ width: i.codedWidth,
1901
+ height: i.codedHeight,
1902
+ crop: {
1903
+ left: i.visibleRect.left,
1904
+ right: i.visibleRect.right,
1905
+ top: i.visibleRect.top,
1906
+ bottom: i.visibleRect.bottom
1907
+ },
1908
+ key_frame: t.keyFrame ? 1 : 0,
1909
+ pict_type: t.keyFrame ? 1 : 0
1910
+ };
1911
+ if (p.width !== d.width || p.height !== d.height || p.format !== d.format) {
1912
+ i._nonSquarePixels && (p.sample_aspect_ratio = [
1913
+ i._sar_num,
1914
+ i._sar_den
1915
+ ]);
1916
+ let b = this._sws, y = this._swsIn, v = this._swsFrame;
1917
+ (!b || p.width !== y.width || p.height !== y.height || p.format !== y.format) && (b && (yield s.sws_freeContext(b)), y = {
1918
+ width: p.width,
1919
+ height: p.height,
1920
+ format: p.format
1921
+ }, b = yield s.sws_getContext(y.width, y.height, y.format, d.width, d.height, d.format, 2, 0, 0, 0), this._sws = b, this._swsIn = y, v || (this._swsFrame = v = yield s.av_frame_alloc()));
1922
+ const [, x, , , , , , E] = yield Promise.all([
1923
+ s.ff_copyin_frame(n, p),
1924
+ s.sws_scale_frame(b, v, n),
1925
+ this._nonSquarePixels ? s.AVFrame_sample_aspect_ratio_s(v, this._sar_num, this._sar_den) : null,
1926
+ s.AVFrame_pts_s(v, _),
1927
+ s.AVFrame_ptshi_s(v, m),
1928
+ s.AVFrame_key_frame_s(v, t.keyFrame ? 1 : 0),
1929
+ s.AVFrame_pict_type_s(v, t.keyFrame ? 1 : 0),
1930
+ s.avcodec_send_frame(r, v)
1931
+ ]);
1932
+ if (x < 0 || E < 0)
1933
+ throw new Error("Encoding failed!");
1934
+ for (l = []; ; ) {
1935
+ const k = yield s.avcodec_receive_packet(r, a);
1936
+ if (k === -s.EAGAIN)
1937
+ break;
1938
+ if (k < 0)
1939
+ throw new Error("Encoding failed!");
1940
+ l.push(yield s.ff_copyout_packet(a));
1941
+ }
1942
+ } else
1943
+ this._nonSquarePixels && (p.sample_aspect_ratio = [
1944
+ this._sar_num,
1945
+ this._sar_den
1946
+ ]), l = yield s.ff_encode_multi(r, n, a, [p]);
1947
+ l.length && !this._extradataSet && (yield this._getExtradata());
1948
+ } catch (c) {
1949
+ this._p = this._p.then(() => {
1950
+ this._closeVideoEncoder(c);
1951
+ });
1952
+ return;
1953
+ }
1954
+ l && this._outputEncodedVideoChunks(l);
1955
+ })).catch(this._error);
1956
+ }
1957
+ // Internal: Get extradata
1958
+ _getExtradata() {
1959
+ return D(this, void 0, void 0, function* () {
1960
+ const e = this._libav, t = this._c, i = yield e.AVCodecContext_extradata(t), s = yield e.AVCodecContext_extradata_size(t);
1961
+ i && s && (this._metadata.decoderConfig.description = this._extradata = yield e.copyout_u8(i, s)), this._extradataSet = !0;
1962
+ });
1963
+ }
1964
+ _outputEncodedVideoChunks(e) {
1965
+ const t = this._libav;
1966
+ for (const i of e) {
1967
+ const s = i.flags & 1 ? "key" : "delta", r = t.i64tof64(i.pts, i.ptshi) * 1e3, a = new H({
1968
+ type: s,
1969
+ timestamp: r,
1970
+ data: i.data
1971
+ });
1972
+ this._extradataSet ? this._output(a, this._metadata || void 0) : this._output(a);
1973
+ }
1974
+ }
1975
+ flush() {
1976
+ if (this.state !== "configured")
1977
+ throw new DOMException("Invalid state", "InvalidStateError");
1978
+ const e = this._p.then(() => D(this, void 0, void 0, function* () {
1979
+ if (!this._c)
1980
+ return;
1981
+ const t = this._libav, i = this._c, s = this._frame, r = this._pkt;
1982
+ let a = null;
1983
+ try {
1984
+ a = yield t.ff_encode_multi(i, s, r, [], !0), this._extradataSet || (yield this._getExtradata());
1985
+ } catch (n) {
1986
+ this._p = this._p.then(() => {
1987
+ this._closeVideoEncoder(n);
1988
+ });
1989
+ }
1990
+ a && this._outputEncodedVideoChunks(a);
1991
+ }));
1992
+ return this._p = e, e;
1993
+ }
1994
+ reset() {
1995
+ this._resetVideoEncoder(new DOMException("Reset", "AbortError"));
1996
+ }
1997
+ close() {
1998
+ this._closeVideoEncoder(new DOMException("Close", "AbortError"));
1999
+ }
2000
+ static isConfigSupported(e) {
2001
+ return D(this, void 0, void 0, function* () {
2002
+ const t = N(e.codec, e);
2003
+ let i = !1;
2004
+ if (t) {
2005
+ const s = yield I();
2006
+ try {
2007
+ const [, r, a, n] = yield s.ff_init_encoder(t.codec, t);
2008
+ yield s.ff_free_encoder(r, a, n), i = !0;
2009
+ } catch {
2010
+ }
2011
+ yield g(s);
2012
+ }
2013
+ return {
2014
+ supported: i,
2015
+ config: Y(e, ["codec", "width", "height", "bitrate", "framerate", "latencyMode"])
2016
+ };
2017
+ });
2018
+ }
2019
+ };
2020
+ var fe = function(o, e, t, i) {
2021
+ function s(r) {
2022
+ return r instanceof t ? r : new t(function(a) {
2023
+ a(r);
2024
+ });
2025
+ }
2026
+ return new (t || (t = Promise))(function(r, a) {
2027
+ function n(c) {
2028
+ try {
2029
+ l(i.next(c));
2030
+ } catch (h) {
2031
+ a(h);
2032
+ }
2033
+ }
2034
+ function d(c) {
2035
+ try {
2036
+ l(i.throw(c));
2037
+ } catch (h) {
2038
+ a(h);
2039
+ }
2040
+ }
2041
+ function l(c) {
2042
+ c.done ? r(c.value) : s(c.value).then(n, d);
2043
+ }
2044
+ l((i = i.apply(o, e || [])).next());
2045
+ });
2046
+ };
2047
+ let w = null, A = null, oe = null, ue = null, se = null;
2048
+ function xe(o, e) {
2049
+ return fe(this, void 0, void 0, function* () {
2050
+ "importScripts" in globalThis && (C.nolibavworker = !0), w = yield C.LibAV(Object.assign(Object.assign({}, o), { noworker: !0, yesthreads: !1 })), A = yield C.LibAV(o), "CanvasRenderingContext2D" in globalThis && (oe = CanvasRenderingContext2D.prototype.drawImage, e && (CanvasRenderingContext2D.prototype.drawImage = Ie)), "OffscreenCanvasRenderingContext2D" in globalThis && (ue = OffscreenCanvasRenderingContext2D.prototype.drawImage, e && (OffscreenCanvasRenderingContext2D.prototype.drawImage = ge)), se = globalThis.createImageBitmap, e && (globalThis.createImageBitmap = _e);
2051
+ });
2052
+ }
2053
+ function ne(o, e, t, i, s, r, a, n, d, l) {
2054
+ if (!e._data)
2055
+ return oe.apply(o, Array.prototype.slice.call(arguments, 1));
2056
+ typeof s > "u" ? (a = t, n = i) : typeof a > "u" && (a = t, n = i, d = s, l = r, s = void 0, r = void 0), typeof d > "u" && (d = e.displayWidth, l = e.displayHeight);
2057
+ const c = ae(w, e.format), h = w.sws_getContext_sync(e.visibleRect.width, e.visibleRect.height, c, d, l, w.AV_PIX_FMT_RGBA, 2, 0, 0, 0), f = w.av_frame_alloc_sync(), u = w.av_frame_alloc_sync();
2058
+ let _, m;
2059
+ e._libavGetData ? (_ = e._libavGetData(), m = e._libavGetLayout()) : (_ = e._data, m = e._layout), w.ff_copyin_frame_sync(f, {
2060
+ data: _,
2061
+ layout: m,
2062
+ format: c,
2063
+ width: e.codedWidth,
2064
+ height: e.codedHeight,
2065
+ crop: {
2066
+ left: e.visibleRect.left,
2067
+ right: e.visibleRect.right,
2068
+ top: e.visibleRect.top,
2069
+ bottom: e.visibleRect.bottom
2070
+ }
2071
+ }), w.sws_scale_frame_sync(h, u, f);
2072
+ const p = w.ff_copyout_frame_video_imagedata_sync(u);
2073
+ o.putImageData(p, a, n), w.av_frame_free_js_sync(u), w.av_frame_free_js_sync(f), w.sws_freeContext_sync(h);
2074
+ }
2075
+ function Ie(o, e, t, i, s, r, a, n, d) {
2076
+ return o instanceof S ? ne(this, o, e, t, i, s, r, a, n, d) : oe.apply(this, arguments);
2077
+ }
2078
+ function ge(o, e, t, i, s, r, a, n, d) {
2079
+ return o instanceof S ? ne(this, o, e, t, i, s, r, a, n, d) : ue.apply(this, arguments);
2080
+ }
2081
+ function _e(o, e = {}) {
2082
+ if (!o._data)
2083
+ return se.apply(globalThis, arguments);
2084
+ const t = ae(A, o.format), i = typeof e.resizeWidth == "number" ? e.resizeWidth : o.displayWidth, s = typeof e.resizeHeight == "number" ? e.resizeHeight : o.displayHeight;
2085
+ return fe(this, void 0, void 0, function* () {
2086
+ const [r, a, n] = yield Promise.all([
2087
+ A.sws_getContext(o.visibleRect.width, o.visibleRect.height, t, i, s, A.AV_PIX_FMT_RGBA, 2, 0, 0, 0),
2088
+ A.av_frame_alloc(),
2089
+ A.av_frame_alloc()
2090
+ ]);
2091
+ let d, l;
2092
+ o._libavGetData ? (d = o._libavGetData(), l = o._libavGetLayout()) : o._data ? (d = o._data, l = o._layout) : (d = new Uint8Array(o.allocationSize()), yield o.copyTo(d)), yield A.ff_copyin_frame(a, {
2093
+ data: d,
2094
+ layout: l,
2095
+ format: t,
2096
+ width: o.codedWidth,
2097
+ height: o.codedHeight,
2098
+ crop: {
2099
+ left: o.visibleRect.left,
2100
+ right: o.visibleRect.right,
2101
+ top: o.visibleRect.top,
2102
+ bottom: o.visibleRect.bottom
2103
+ }
2104
+ }), // Rescale
2105
+ yield A.sws_scale_frame(r, n, a);
2106
+ const c = yield A.ff_copyout_frame_video_imagedata(n);
2107
+ return yield Promise.all([
2108
+ A.av_frame_free_js(n),
2109
+ A.av_frame_free_js(a),
2110
+ A.sws_freeContext(r)
2111
+ ]), yield se(c);
2112
+ });
2113
+ }
2114
+ var J = function(o, e, t, i) {
2115
+ function s(r) {
2116
+ return r instanceof t ? r : new t(function(a) {
2117
+ a(r);
2118
+ });
2119
+ }
2120
+ return new (t || (t = Promise))(function(r, a) {
2121
+ function n(c) {
2122
+ try {
2123
+ l(i.next(c));
2124
+ } catch (h) {
2125
+ a(h);
2126
+ }
2127
+ }
2128
+ function d(c) {
2129
+ try {
2130
+ l(i.throw(c));
2131
+ } catch (h) {
2132
+ a(h);
2133
+ }
2134
+ }
2135
+ function l(c) {
2136
+ c.done ? r(c.value) : s(c.value).then(n, d);
2137
+ }
2138
+ l((i = i.apply(o, e || [])).next());
2139
+ });
2140
+ };
2141
+ let W = class extends Error {
2142
+ constructor() {
2143
+ super("The requested configuration is not supported");
2144
+ }
2145
+ };
2146
+ function Ve(o) {
2147
+ return J(this, void 0, void 0, function* () {
2148
+ try {
2149
+ if (typeof globalThis.AudioDecoder < "u" && (yield globalThis.AudioDecoder.isConfigSupported(o)).supported)
2150
+ return {
2151
+ AudioDecoder: globalThis.AudioDecoder,
2152
+ EncodedAudioChunk: globalThis.EncodedAudioChunk,
2153
+ AudioData: globalThis.AudioData
2154
+ };
2155
+ } catch {
2156
+ }
2157
+ if ((yield $.isConfigSupported(o)).supported)
2158
+ return {
2159
+ AudioDecoder: $,
2160
+ EncodedAudioChunk: T,
2161
+ AudioData: B
2162
+ };
2163
+ throw new W();
2164
+ });
2165
+ }
2166
+ function ke(o) {
2167
+ return J(this, void 0, void 0, function* () {
2168
+ try {
2169
+ if (typeof globalThis.VideoDecoder < "u" && (yield globalThis.VideoDecoder.isConfigSupported(o)).supported)
2170
+ return {
2171
+ VideoDecoder: globalThis.VideoDecoder,
2172
+ EncodedVideoChunk: globalThis.EncodedVideoChunk,
2173
+ VideoFrame: globalThis.VideoFrame
2174
+ };
2175
+ } catch {
2176
+ }
2177
+ if ((yield q.isConfigSupported(o)).supported)
2178
+ return {
2179
+ VideoDecoder: q,
2180
+ EncodedVideoChunk: H,
2181
+ VideoFrame: S
2182
+ };
2183
+ throw new W();
2184
+ });
2185
+ }
2186
+ function Oe(o) {
2187
+ return J(this, void 0, void 0, function* () {
2188
+ try {
2189
+ if (typeof globalThis.AudioEncoder < "u" && (yield globalThis.AudioEncoder.isConfigSupported(o)).supported)
2190
+ return {
2191
+ AudioEncoder: globalThis.AudioEncoder,
2192
+ EncodedAudioChunk: globalThis.EncodedAudioChunk,
2193
+ AudioData: globalThis.AudioData
2194
+ };
2195
+ } catch {
2196
+ }
2197
+ if ((yield X.isConfigSupported(o)).supported)
2198
+ return {
2199
+ AudioEncoder: X,
2200
+ EncodedAudioChunk: T,
2201
+ AudioData: B
2202
+ };
2203
+ throw new W();
2204
+ });
2205
+ }
2206
+ function Ce(o) {
2207
+ return J(this, void 0, void 0, function* () {
2208
+ try {
2209
+ if (typeof globalThis.VideoEncoder < "u" && (yield globalThis.VideoEncoder.isConfigSupported(o)).supported)
2210
+ return {
2211
+ VideoEncoder: globalThis.VideoEncoder,
2212
+ EncodedVideoChunk: globalThis.EncodedVideoChunk,
2213
+ VideoFrame: globalThis.VideoFrame
2214
+ };
2215
+ } catch {
2216
+ }
2217
+ if ((yield j.isConfigSupported(o)).supported)
2218
+ return {
2219
+ VideoEncoder: j,
2220
+ EncodedVideoChunk: H,
2221
+ VideoFrame: S
2222
+ };
2223
+ throw new W();
2224
+ });
2225
+ }
2226
+ var Se = function(o, e, t, i) {
2227
+ function s(r) {
2228
+ return r instanceof t ? r : new t(function(a) {
2229
+ a(r);
2230
+ });
2231
+ }
2232
+ return new (t || (t = Promise))(function(r, a) {
2233
+ function n(c) {
2234
+ try {
2235
+ l(i.next(c));
2236
+ } catch (h) {
2237
+ a(h);
2238
+ }
2239
+ }
2240
+ function d(c) {
2241
+ try {
2242
+ l(i.throw(c));
2243
+ } catch (h) {
2244
+ a(h);
2245
+ }
2246
+ }
2247
+ function l(c) {
2248
+ c.done ? r(c.value) : s(c.value).then(n, d);
2249
+ }
2250
+ l((i = i.apply(o, e || [])).next());
2251
+ });
2252
+ };
2253
+ function Le(o = {}) {
2254
+ return Se(this, void 0, void 0, function* () {
2255
+ let e = {};
2256
+ if (o.libavOptions && Object.assign(e, o.libavOptions), !o.LibAV && typeof globalThis.LibAV > "u" && (yield new Promise((t, i) => {
2257
+ e.noworker = !0;
2258
+ const s = "https://cdn.jsdelivr.net/npm/@libav.js/variant-webm-vp9@6.7.7/dist";
2259
+ globalThis.LibAV = { base: s };
2260
+ const r = "libav-6.7.7.1.1-webm-vp9.js";
2261
+ if (typeof importScripts < "u")
2262
+ importScripts(`${s}/${r}`), t(void 0);
2263
+ else {
2264
+ const a = document.createElement("script");
2265
+ a.src = `${s}/${r}`, a.onload = t, a.onerror = i, document.body.appendChild(a);
2266
+ }
2267
+ })), o.LibAV && me(o.LibAV), ye(e), yield be(), o.polyfill)
2268
+ for (const t of [
2269
+ ["EncodedAudioChunk", T],
2270
+ ["AudioData", B],
2271
+ ["AudioDecoder", $],
2272
+ ["AudioEncoder", X],
2273
+ ["EncodedVideoChunk", H],
2274
+ ["VideoFrame", S],
2275
+ ["VideoDecoder", q],
2276
+ ["VideoEncoder", j]
2277
+ ])
2278
+ globalThis[t[0]] || (globalThis[t[0]] = t[1]);
2279
+ yield xe(e, !!o.polyfill);
2280
+ });
2281
+ }
2282
+ const Be = T, He = B, We = $, ze = X, Ge = H, Ue = S, Ne = q, $e = j, Xe = ne, qe = _e, je = W, Qe = Ve, Ye = ke, Je = Oe, Ke = Ce;
2283
+ export {
2284
+ He as AudioData,
2285
+ We as AudioDecoder,
2286
+ ze as AudioEncoder,
2287
+ Be as EncodedAudioChunk,
2288
+ Ge as EncodedVideoChunk,
2289
+ je as UnsupportedException,
2290
+ Ne as VideoDecoder,
2291
+ $e as VideoEncoder,
2292
+ Ue as VideoFrame,
2293
+ Xe as canvasDrawImage,
2294
+ qe as createImageBitmap,
2295
+ Qe as getAudioDecoder,
2296
+ Je as getAudioEncoder,
2297
+ Ye as getVideoDecoder,
2298
+ Ke as getVideoEncoder,
2299
+ Le as load
2300
+ };
2301
+ //# sourceMappingURL=main-DGBFe0O7.js.map