webcodecs-examples 0.1.0 → 0.1.1
This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/dist/index.js +1454 -182
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
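The bulk of the change is in the dist bundle shown below. A comparison like this one can be reproduced locally by downloading both published tarballs and diffing the extracted trees. The following sketch is illustrative only (it is not part of the package) and assumes Node.js 18+ run as an ES module, with npm, tar, and diff available on PATH; the package name and versions are taken from the summary above.

// Sketch: reproduce the registry diff locally from the published tarballs.
import { execSync } from "node:child_process";
import { mkdirSync } from "node:fs";

const pkg = "webcodecs-examples";
const versions = ["0.1.0", "0.1.1"];

for (const v of versions) {
  // `npm pack <name>@<version>` downloads the published tarball into the cwd.
  execSync(`npm pack ${pkg}@${v}`, { stdio: "inherit" });
  mkdirSync(v, { recursive: true });
  // Each npm tarball extracts to a top-level `package/` directory.
  execSync(`tar -xzf ${pkg}-${v}.tgz -C ${v}`, { stdio: "inherit" });
}

// `diff` exits non-zero when the trees differ, so swallow that exit code.
try {
  execSync(`diff -ru ${versions[0]}/package ${versions[1]}/package`, { stdio: "inherit" });
} catch {
  // Differences were already printed above; a non-zero exit here is expected.
}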
package/dist/index.js (CHANGED)
@@ -1,55 +1,54 @@
-
-
-
-
-
-return (r[s[e + 0]] + r[s[e + 1]] + r[s[e + 2]] + r[s[e + 3]] + "-" + r[s[e + 4]] + r[s[e + 5]] + "-" + r[s[e + 6]] + r[s[e + 7]] + "-" + r[s[e + 8]] + r[s[e + 9]] + "-" + r[s[e + 10]] + r[s[e + 11]] + r[s[e + 12]] + r[s[e + 13]] + r[s[e + 14]] + r[s[e + 15]]).toLowerCase();
+const k = [];
+for (let e = 0; e < 256; ++e)
+k.push((e + 256).toString(16).slice(1));
+function ft(e, t = 0) {
+return (k[e[t + 0]] + k[e[t + 1]] + k[e[t + 2]] + k[e[t + 3]] + "-" + k[e[t + 4]] + k[e[t + 5]] + "-" + k[e[t + 6]] + k[e[t + 7]] + "-" + k[e[t + 8]] + k[e[t + 9]] + "-" + k[e[t + 10]] + k[e[t + 11]] + k[e[t + 12]] + k[e[t + 13]] + k[e[t + 14]] + k[e[t + 15]]).toLowerCase();
 }
-let
-const
-function
-if (!
+let Ce;
+const mt = new Uint8Array(16);
+function ct() {
+if (!Ce) {
 if (typeof crypto > "u" || !crypto.getRandomValues)
 throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");
-
+Ce = crypto.getRandomValues.bind(crypto);
 }
-return
+return Ce(mt);
 }
-const
-function
-if (
-return
-
-const
-return
+const pt = typeof crypto < "u" && crypto.randomUUID && crypto.randomUUID.bind(crypto), je = { randomUUID: pt };
+function Xe(e, t, i) {
+if (je.randomUUID && !e)
+return je.randomUUID();
+e = e || {};
+const r = e.random || (e.rng || ct)();
+return r[6] = r[6] & 15 | 64, r[8] = r[8] & 63 | 128, ft(r);
 }
-class
-constructor(
-this.worker = new Worker(
+class Se {
+constructor(t, i = [], r) {
+this.worker = new Worker(t, { type: "module" }), this.listeners = {}, this.persistentEvents = i, this.worker.onmessage = this.handleWorkerMessage.bind(this), r && (this.port = r, this.worker.postMessage({ cmd: "port", data: r }, { transfer: [r] }));
 }
-setupPort(
-this.port =
+setupPort(t) {
+this.port = t, this.worker.postMessage({ cmd: "port", data: t }, { transfer: [t] });
 }
-handleWorkerMessage(
-this.listeners[
+handleWorkerMessage(t) {
+this.listeners[t.data.request_id] && (this.listeners[t.data.request_id](t.data.res), this.persistentEvents.includes(t.data.request_id) || delete this.listeners[t.data.request_id]);
 }
-addPersistentListener(
-this.persistentEvents.includes(
+addPersistentListener(t, i) {
+this.persistentEvents.includes(t) || this.persistentEvents.push(t), this.listeners[t] = i;
 }
-async sendMessage(
-const
-return new Promise((
+async sendMessage(t, i = {}, r = [], a = !0) {
+const n = Xe(), o = a ? ke(i) : i;
+return new Promise((l, p) => {
 try {
-const
-cmd:
-request_id:
-data:
+const y = {
+cmd: t,
+request_id: n,
+data: o
 };
-this.worker.postMessage(
-
+this.worker.postMessage(y, r), this.listeners[n] = (_) => {
+l(_);
 };
-} catch (
-
+} catch (y) {
+p(new Error("Failed to send message to worker: " + y));
 }
 });
 }
@@ -57,10 +56,10 @@ class f {
 this.worker.terminate(), this.listeners = {};
 }
 }
-function
-if (
-return
-const
+function ke(e) {
+if (e === null || typeof e != "object")
+return e;
+const t = [
 ArrayBuffer,
 Blob,
 EncodedVideoChunk,
@@ -73,24 +72,1297 @@ function g(s) {
|
|
|
73
72
|
MessageChannel,
|
|
74
73
|
MessagePort
|
|
75
74
|
];
|
|
76
|
-
for (const
|
|
77
|
-
if (
|
|
78
|
-
return
|
|
79
|
-
if (Array.isArray(
|
|
80
|
-
return
|
|
81
|
-
const
|
|
82
|
-
for (const
|
|
83
|
-
if (
|
|
75
|
+
for (const a of t)
|
|
76
|
+
if (e instanceof a)
|
|
77
|
+
return e;
|
|
78
|
+
if (Array.isArray(e))
|
|
79
|
+
return e.map((a) => ke(a));
|
|
80
|
+
const i = e && e.__v_raw ? e.__v_raw : e, r = {};
|
|
81
|
+
for (const a in i) {
|
|
82
|
+
if (a.startsWith("__v_"))
|
|
84
83
|
continue;
|
|
85
|
-
const
|
|
86
|
-
typeof
|
|
84
|
+
const n = i[a];
|
|
85
|
+
typeof n != "function" && (r[a] = ke(n));
|
|
87
86
|
}
|
|
88
|
-
return
|
|
87
|
+
return r;
|
|
89
88
|
}
|
|
90
|
-
|
|
91
|
-
|
|
89
|
+
var xe = (e, t, i) => {
|
|
90
|
+
if (!t.has(e))
|
|
91
|
+
throw TypeError("Cannot " + i);
|
|
92
|
+
}, s = (e, t, i) => (xe(e, t, "read from private field"), i ? i.call(e) : t.get(e)), f = (e, t, i) => {
|
|
93
|
+
if (t.has(e))
|
|
94
|
+
throw TypeError("Cannot add the same private member more than once");
|
|
95
|
+
t instanceof WeakSet ? t.add(e) : t.set(e, i);
|
|
96
|
+
}, S = (e, t, i, r) => (xe(e, t, "write to private field"), t.set(e, i), i), gt = (e, t, i, r) => ({
|
|
97
|
+
set _(a) {
|
|
98
|
+
S(e, t, a);
|
|
99
|
+
},
|
|
100
|
+
get _() {
|
|
101
|
+
return s(e, t, r);
|
|
102
|
+
}
|
|
103
|
+
}), m = (e, t, i) => (xe(e, t, "access private method"), i), c = new Uint8Array(8), O = new DataView(c.buffer), C = (e) => [(e % 256 + 256) % 256], g = (e) => (O.setUint16(0, e, !1), [c[0], c[1]]), wt = (e) => (O.setInt16(0, e, !1), [c[0], c[1]]), Ze = (e) => (O.setUint32(0, e, !1), [c[1], c[2], c[3]]), h = (e) => (O.setUint32(0, e, !1), [c[0], c[1], c[2], c[3]]), yt = (e) => (O.setInt32(0, e, !1), [c[0], c[1], c[2], c[3]]), j = (e) => (O.setUint32(0, Math.floor(e / 2 ** 32), !1), O.setUint32(4, e, !1), [c[0], c[1], c[2], c[3], c[4], c[5], c[6], c[7]]), Oe = (e) => (O.setInt16(0, 2 ** 8 * e, !1), [c[0], c[1]]), I = (e) => (O.setInt32(0, 2 ** 16 * e, !1), [c[0], c[1], c[2], c[3]]), Te = (e) => (O.setInt32(0, 2 ** 30 * e, !1), [c[0], c[1], c[2], c[3]]), M = (e, t = !1) => {
|
|
104
|
+
let i = Array(e.length).fill(null).map((r, a) => e.charCodeAt(a));
|
|
105
|
+
return t && i.push(0), i;
|
|
106
|
+
}, me = (e) => e && e[e.length - 1], Be = (e) => {
|
|
107
|
+
let t;
|
|
108
|
+
for (let i of e)
|
|
109
|
+
(!t || i.presentationTimestamp > t.presentationTimestamp) && (t = i);
|
|
110
|
+
return t;
|
|
111
|
+
}, x = (e, t, i = !0) => {
|
|
112
|
+
let r = e * t;
|
|
113
|
+
return i ? Math.round(r) : r;
|
|
114
|
+
}, Ke = (e) => {
|
|
115
|
+
let t = e * (Math.PI / 180), i = Math.cos(t), r = Math.sin(t);
|
|
116
|
+
return [
|
|
117
|
+
i,
|
|
118
|
+
r,
|
|
119
|
+
0,
|
|
120
|
+
-r,
|
|
121
|
+
i,
|
|
122
|
+
0,
|
|
123
|
+
0,
|
|
124
|
+
0,
|
|
125
|
+
1
|
|
126
|
+
];
|
|
127
|
+
}, Qe = Ke(0), Ye = (e) => [
|
|
128
|
+
I(e[0]),
|
|
129
|
+
I(e[1]),
|
|
130
|
+
Te(e[2]),
|
|
131
|
+
I(e[3]),
|
|
132
|
+
I(e[4]),
|
|
133
|
+
Te(e[5]),
|
|
134
|
+
I(e[6]),
|
|
135
|
+
I(e[7]),
|
|
136
|
+
Te(e[8])
|
|
137
|
+
], ee = (e) => !e || typeof e != "object" ? e : Array.isArray(e) ? e.map(ee) : Object.fromEntries(Object.entries(e).map(([t, i]) => [t, ee(i)])), X = (e) => e >= 0 && e < 2 ** 32, v = (e, t, i) => ({
|
|
138
|
+
type: e,
|
|
139
|
+
contents: t && new Uint8Array(t.flat(10)),
|
|
140
|
+
children: i
|
|
141
|
+
}), w = (e, t, i, r, a) => v(
|
|
142
|
+
e,
|
|
143
|
+
[C(t), Ze(i), r ?? []],
|
|
144
|
+
a
|
|
145
|
+
), vt = (e) => {
|
|
146
|
+
let t = 512;
|
|
147
|
+
return e.fragmented ? v("ftyp", [
|
|
148
|
+
M("iso5"),
|
|
149
|
+
// Major brand
|
|
150
|
+
h(t),
|
|
151
|
+
// Minor version
|
|
152
|
+
// Compatible brands
|
|
153
|
+
M("iso5"),
|
|
154
|
+
M("iso6"),
|
|
155
|
+
M("mp41")
|
|
156
|
+
]) : v("ftyp", [
|
|
157
|
+
M("isom"),
|
|
158
|
+
// Major brand
|
|
159
|
+
h(t),
|
|
160
|
+
// Minor version
|
|
161
|
+
// Compatible brands
|
|
162
|
+
M("isom"),
|
|
163
|
+
e.holdsAvc ? M("avc1") : [],
|
|
164
|
+
M("mp41")
|
|
165
|
+
]);
|
|
166
|
+
}, be = (e) => ({ type: "mdat", largeSize: e }), Ct = (e) => ({ type: "free", size: e }), he = (e, t, i = !1) => v("moov", null, [
|
|
167
|
+
Tt(t, e),
|
|
168
|
+
...e.map((r) => St(r, t)),
|
|
169
|
+
i ? Jt(e) : null
|
|
170
|
+
]), Tt = (e, t) => {
|
|
171
|
+
let i = x(Math.max(
|
|
172
|
+
0,
|
|
173
|
+
...t.filter((o) => o.samples.length > 0).map((o) => {
|
|
174
|
+
const l = Be(o.samples);
|
|
175
|
+
return l.presentationTimestamp + l.duration;
|
|
176
|
+
})
|
|
177
|
+
), ze), r = Math.max(...t.map((o) => o.id)) + 1, a = !X(e) || !X(i), n = a ? j : h;
|
|
178
|
+
return w("mvhd", +a, 0, [
|
|
179
|
+
n(e),
|
|
180
|
+
// Creation time
|
|
181
|
+
n(e),
|
|
182
|
+
// Modification time
|
|
183
|
+
h(ze),
|
|
184
|
+
// Timescale
|
|
185
|
+
n(i),
|
|
186
|
+
// Duration
|
|
187
|
+
I(1),
|
|
188
|
+
// Preferred rate
|
|
189
|
+
Oe(1),
|
|
190
|
+
// Preferred volume
|
|
191
|
+
Array(10).fill(0),
|
|
192
|
+
// Reserved
|
|
193
|
+
Ye(Qe),
|
|
194
|
+
// Matrix
|
|
195
|
+
Array(24).fill(0),
|
|
196
|
+
// Pre-defined
|
|
197
|
+
h(r)
|
|
198
|
+
// Next track ID
|
|
199
|
+
]);
|
|
200
|
+
}, St = (e, t) => v("trak", null, [
|
|
201
|
+
kt(e, t),
|
|
202
|
+
bt(e, t)
|
|
203
|
+
]), kt = (e, t) => {
|
|
204
|
+
let i = Be(e.samples), r = x(
|
|
205
|
+
i ? i.presentationTimestamp + i.duration : 0,
|
|
206
|
+
ze
|
|
207
|
+
), a = !X(t) || !X(r), n = a ? j : h, o;
|
|
208
|
+
return e.info.type === "video" ? o = typeof e.info.rotation == "number" ? Ke(e.info.rotation) : e.info.rotation : o = Qe, w("tkhd", +a, 3, [
|
|
209
|
+
n(t),
|
|
210
|
+
// Creation time
|
|
211
|
+
n(t),
|
|
212
|
+
// Modification time
|
|
213
|
+
h(e.id),
|
|
214
|
+
// Track ID
|
|
215
|
+
h(0),
|
|
216
|
+
// Reserved
|
|
217
|
+
n(r),
|
|
218
|
+
// Duration
|
|
219
|
+
Array(8).fill(0),
|
|
220
|
+
// Reserved
|
|
221
|
+
g(0),
|
|
222
|
+
// Layer
|
|
223
|
+
g(0),
|
|
224
|
+
// Alternate group
|
|
225
|
+
Oe(e.info.type === "audio" ? 1 : 0),
|
|
226
|
+
// Volume
|
|
227
|
+
g(0),
|
|
228
|
+
// Reserved
|
|
229
|
+
Ye(o),
|
|
230
|
+
// Matrix
|
|
231
|
+
I(e.info.type === "video" ? e.info.width : 0),
|
|
232
|
+
// Track width
|
|
233
|
+
I(e.info.type === "video" ? e.info.height : 0)
|
|
234
|
+
// Track height
|
|
235
|
+
]);
|
|
236
|
+
}, bt = (e, t) => v("mdia", null, [
|
|
237
|
+
_t(e, t),
|
|
238
|
+
Et(e.info.type === "video" ? "vide" : "soun"),
|
|
239
|
+
zt(e)
|
|
240
|
+
]), _t = (e, t) => {
|
|
241
|
+
let i = Be(e.samples), r = x(
|
|
242
|
+
i ? i.presentationTimestamp + i.duration : 0,
|
|
243
|
+
e.timescale
|
|
244
|
+
), a = !X(t) || !X(r), n = a ? j : h;
|
|
245
|
+
return w("mdhd", +a, 0, [
|
|
246
|
+
n(t),
|
|
247
|
+
// Creation time
|
|
248
|
+
n(t),
|
|
249
|
+
// Modification time
|
|
250
|
+
h(e.timescale),
|
|
251
|
+
// Timescale
|
|
252
|
+
n(r),
|
|
253
|
+
// Duration
|
|
254
|
+
g(21956),
|
|
255
|
+
// Language ("und", undetermined)
|
|
256
|
+
g(0)
|
|
257
|
+
// Quality
|
|
258
|
+
]);
|
|
259
|
+
}, Et = (e) => w("hdlr", 0, 0, [
|
|
260
|
+
M("mhlr"),
|
|
261
|
+
// Component type
|
|
262
|
+
M(e),
|
|
263
|
+
// Component subtype
|
|
264
|
+
h(0),
|
|
265
|
+
// Component manufacturer
|
|
266
|
+
h(0),
|
|
267
|
+
// Component flags
|
|
268
|
+
h(0),
|
|
269
|
+
// Component flags mask
|
|
270
|
+
M("mp4-muxer-hdlr", !0)
|
|
271
|
+
// Component name
|
|
272
|
+
]), zt = (e) => v("minf", null, [
|
|
273
|
+
e.info.type === "video" ? At() : Mt(),
|
|
274
|
+
Pt(),
|
|
275
|
+
xt(e)
|
|
276
|
+
]), At = () => w("vmhd", 0, 1, [
|
|
277
|
+
g(0),
|
|
278
|
+
// Graphics mode
|
|
279
|
+
g(0),
|
|
280
|
+
// Opcolor R
|
|
281
|
+
g(0),
|
|
282
|
+
// Opcolor G
|
|
283
|
+
g(0)
|
|
284
|
+
// Opcolor B
|
|
285
|
+
]), Mt = () => w("smhd", 0, 0, [
|
|
286
|
+
g(0),
|
|
287
|
+
// Balance
|
|
288
|
+
g(0)
|
|
289
|
+
// Reserved
|
|
290
|
+
]), Pt = () => v("dinf", null, [
|
|
291
|
+
Dt()
|
|
292
|
+
]), Dt = () => w("dref", 0, 0, [
|
|
293
|
+
h(1)
|
|
294
|
+
// Entry count
|
|
295
|
+
], [
|
|
296
|
+
It()
|
|
297
|
+
]), It = () => w("url ", 0, 1), xt = (e) => {
|
|
298
|
+
const t = e.compositionTimeOffsetTable.length > 1 || e.compositionTimeOffsetTable.some((i) => i.sampleCompositionTimeOffset !== 0);
|
|
299
|
+
return v("stbl", null, [
|
|
300
|
+
Ot(e),
|
|
301
|
+
Gt(e),
|
|
302
|
+
Xt(e),
|
|
303
|
+
Zt(e),
|
|
304
|
+
Kt(e),
|
|
305
|
+
Qt(e),
|
|
306
|
+
t ? Yt(e) : null
|
|
307
|
+
]);
|
|
308
|
+
}, Ot = (e) => w("stsd", 0, 0, [
|
|
309
|
+
h(1)
|
|
310
|
+
// Entry count
|
|
311
|
+
], [
|
|
312
|
+
e.info.type === "video" ? Bt(
|
|
313
|
+
li[e.info.codec],
|
|
314
|
+
e
|
|
315
|
+
) : jt(
|
|
316
|
+
ui[e.info.codec],
|
|
317
|
+
e
|
|
318
|
+
)
|
|
319
|
+
]), Bt = (e, t) => v(e, [
|
|
320
|
+
Array(6).fill(0),
|
|
321
|
+
// Reserved
|
|
322
|
+
g(1),
|
|
323
|
+
// Data reference index
|
|
324
|
+
g(0),
|
|
325
|
+
// Pre-defined
|
|
326
|
+
g(0),
|
|
327
|
+
// Reserved
|
|
328
|
+
Array(12).fill(0),
|
|
329
|
+
// Pre-defined
|
|
330
|
+
g(t.info.width),
|
|
331
|
+
// Width
|
|
332
|
+
g(t.info.height),
|
|
333
|
+
// Height
|
|
334
|
+
h(4718592),
|
|
335
|
+
// Horizontal resolution
|
|
336
|
+
h(4718592),
|
|
337
|
+
// Vertical resolution
|
|
338
|
+
h(0),
|
|
339
|
+
// Reserved
|
|
340
|
+
g(1),
|
|
341
|
+
// Frame count
|
|
342
|
+
Array(32).fill(0),
|
|
343
|
+
// Compressor name
|
|
344
|
+
g(24),
|
|
345
|
+
// Depth
|
|
346
|
+
wt(65535)
|
|
347
|
+
// Pre-defined
|
|
348
|
+
], [
|
|
349
|
+
di[t.info.codec](t),
|
|
350
|
+
t.info.decoderConfig.colorSpace ? Nt(t) : null
|
|
351
|
+
]), Ft = {
|
|
352
|
+
bt709: 1,
|
|
353
|
+
// ITU-R BT.709
|
|
354
|
+
bt470bg: 5,
|
|
355
|
+
// ITU-R BT.470BG
|
|
356
|
+
smpte170m: 6
|
|
357
|
+
// ITU-R BT.601 525 - SMPTE 170M
|
|
358
|
+
}, Ut = {
|
|
359
|
+
bt709: 1,
|
|
360
|
+
// ITU-R BT.709
|
|
361
|
+
smpte170m: 6,
|
|
362
|
+
// SMPTE 170M
|
|
363
|
+
"iec61966-2-1": 13
|
|
364
|
+
// IEC 61966-2-1
|
|
365
|
+
}, Wt = {
|
|
366
|
+
rgb: 0,
|
|
367
|
+
// Identity
|
|
368
|
+
bt709: 1,
|
|
369
|
+
// ITU-R BT.709
|
|
370
|
+
bt470bg: 5,
|
|
371
|
+
// ITU-R BT.470BG
|
|
372
|
+
smpte170m: 6
|
|
373
|
+
// SMPTE 170M
|
|
374
|
+
}, Nt = (e) => v("colr", [
|
|
375
|
+
M("nclx"),
|
|
376
|
+
// Colour type
|
|
377
|
+
g(Ft[e.info.decoderConfig.colorSpace.primaries]),
|
|
378
|
+
// Colour primaries
|
|
379
|
+
g(Ut[e.info.decoderConfig.colorSpace.transfer]),
|
|
380
|
+
// Transfer characteristics
|
|
381
|
+
g(Wt[e.info.decoderConfig.colorSpace.matrix]),
|
|
382
|
+
// Matrix coefficients
|
|
383
|
+
C((e.info.decoderConfig.colorSpace.fullRange ? 1 : 0) << 7)
|
|
384
|
+
// Full range flag
|
|
385
|
+
]), Rt = (e) => e.info.decoderConfig && v("avcC", [
|
|
386
|
+
// For AVC, description is an AVCDecoderConfigurationRecord, so nothing else to do here
|
|
387
|
+
...new Uint8Array(e.info.decoderConfig.description)
|
|
388
|
+
]), Vt = (e) => e.info.decoderConfig && v("hvcC", [
|
|
389
|
+
// For HEVC, description is a HEVCDecoderConfigurationRecord, so nothing else to do here
|
|
390
|
+
...new Uint8Array(e.info.decoderConfig.description)
|
|
391
|
+
]), Lt = (e) => {
|
|
392
|
+
if (!e.info.decoderConfig)
|
|
393
|
+
return null;
|
|
394
|
+
let t = e.info.decoderConfig;
|
|
395
|
+
if (!t.colorSpace)
|
|
396
|
+
throw new Error("'colorSpace' is required in the decoder config for VP9.");
|
|
397
|
+
let i = t.codec.split("."), r = Number(i[1]), a = Number(i[2]), l = (Number(i[3]) << 4) + (0 << 1) + Number(t.colorSpace.fullRange);
|
|
398
|
+
return w("vpcC", 1, 0, [
|
|
399
|
+
C(r),
|
|
400
|
+
// Profile
|
|
401
|
+
C(a),
|
|
402
|
+
// Level
|
|
403
|
+
C(l),
|
|
404
|
+
// Bit depth, chroma subsampling, full range
|
|
405
|
+
C(2),
|
|
406
|
+
// Colour primaries
|
|
407
|
+
C(2),
|
|
408
|
+
// Transfer characteristics
|
|
409
|
+
C(2),
|
|
410
|
+
// Matrix coefficients
|
|
411
|
+
g(0)
|
|
412
|
+
// Codec initialization data size
|
|
413
|
+
]);
|
|
414
|
+
}, $t = () => {
|
|
415
|
+
let i = (1 << 7) + 1;
|
|
416
|
+
return v("av1C", [
|
|
417
|
+
i,
|
|
418
|
+
0,
|
|
419
|
+
0,
|
|
420
|
+
0
|
|
421
|
+
]);
|
|
422
|
+
}, jt = (e, t) => v(e, [
|
|
423
|
+
Array(6).fill(0),
|
|
424
|
+
// Reserved
|
|
425
|
+
g(1),
|
|
426
|
+
// Data reference index
|
|
427
|
+
g(0),
|
|
428
|
+
// Version
|
|
429
|
+
g(0),
|
|
430
|
+
// Revision level
|
|
431
|
+
h(0),
|
|
432
|
+
// Vendor
|
|
433
|
+
g(t.info.numberOfChannels),
|
|
434
|
+
// Number of channels
|
|
435
|
+
g(16),
|
|
436
|
+
// Sample size (bits)
|
|
437
|
+
g(0),
|
|
438
|
+
// Compression ID
|
|
439
|
+
g(0),
|
|
440
|
+
// Packet size
|
|
441
|
+
I(t.info.sampleRate)
|
|
442
|
+
// Sample rate
|
|
443
|
+
], [
|
|
444
|
+
fi[t.info.codec](t)
|
|
445
|
+
]), qt = (e) => {
|
|
446
|
+
let t = new Uint8Array(e.info.decoderConfig.description);
|
|
447
|
+
return w("esds", 0, 0, [
|
|
448
|
+
// https://stackoverflow.com/a/54803118
|
|
449
|
+
h(58753152),
|
|
450
|
+
// TAG(3) = Object Descriptor ([2])
|
|
451
|
+
C(32 + t.byteLength),
|
|
452
|
+
// length of this OD (which includes the next 2 tags)
|
|
453
|
+
g(1),
|
|
454
|
+
// ES_ID = 1
|
|
455
|
+
C(0),
|
|
456
|
+
// flags etc = 0
|
|
457
|
+
h(75530368),
|
|
458
|
+
// TAG(4) = ES Descriptor ([2]) embedded in above OD
|
|
459
|
+
C(18 + t.byteLength),
|
|
460
|
+
// length of this ESD
|
|
461
|
+
C(64),
|
|
462
|
+
// MPEG-4 Audio
|
|
463
|
+
C(21),
|
|
464
|
+
// stream type(6bits)=5 audio, flags(2bits)=1
|
|
465
|
+
Ze(0),
|
|
466
|
+
// 24bit buffer size
|
|
467
|
+
h(130071),
|
|
468
|
+
// max bitrate
|
|
469
|
+
h(130071),
|
|
470
|
+
// avg bitrate
|
|
471
|
+
h(92307584),
|
|
472
|
+
// TAG(5) = ASC ([2],[3]) embedded in above OD
|
|
473
|
+
C(t.byteLength),
|
|
474
|
+
// length
|
|
475
|
+
...t,
|
|
476
|
+
h(109084800),
|
|
477
|
+
// TAG(6)
|
|
478
|
+
C(1),
|
|
479
|
+
// length
|
|
480
|
+
C(2)
|
|
481
|
+
// data
|
|
482
|
+
]);
|
|
483
|
+
}, Ht = (e) => {
|
|
484
|
+
var a;
|
|
485
|
+
let t = 3840, i = 0;
|
|
486
|
+
const r = (a = e.info.decoderConfig) == null ? void 0 : a.description;
|
|
487
|
+
if (r) {
|
|
488
|
+
if (r.byteLength < 18)
|
|
489
|
+
throw new TypeError("Invalid decoder description provided for Opus; must be at least 18 bytes long.");
|
|
490
|
+
const n = ArrayBuffer.isView(r) ? new DataView(r.buffer, r.byteOffset, r.byteLength) : new DataView(r);
|
|
491
|
+
t = n.getUint16(10, !0), i = n.getInt16(14, !0);
|
|
492
|
+
}
|
|
493
|
+
return v("dOps", [
|
|
494
|
+
C(0),
|
|
495
|
+
// Version
|
|
496
|
+
C(e.info.numberOfChannels),
|
|
497
|
+
// OutputChannelCount
|
|
498
|
+
g(t),
|
|
499
|
+
h(e.info.sampleRate),
|
|
500
|
+
// InputSampleRate
|
|
501
|
+
Oe(i),
|
|
502
|
+
// OutputGain
|
|
503
|
+
C(0)
|
|
504
|
+
// ChannelMappingFamily
|
|
505
|
+
]);
|
|
506
|
+
}, Gt = (e) => w("stts", 0, 0, [
|
|
507
|
+
h(e.timeToSampleTable.length),
|
|
508
|
+
// Number of entries
|
|
509
|
+
e.timeToSampleTable.map((t) => [
|
|
510
|
+
// Time-to-sample table
|
|
511
|
+
h(t.sampleCount),
|
|
512
|
+
// Sample count
|
|
513
|
+
h(t.sampleDelta)
|
|
514
|
+
// Sample duration
|
|
515
|
+
])
|
|
516
|
+
]), Xt = (e) => {
|
|
517
|
+
if (e.samples.every((i) => i.type === "key"))
|
|
518
|
+
return null;
|
|
519
|
+
let t = [...e.samples.entries()].filter(([, i]) => i.type === "key");
|
|
520
|
+
return w("stss", 0, 0, [
|
|
521
|
+
h(t.length),
|
|
522
|
+
// Number of entries
|
|
523
|
+
t.map(([i]) => h(i + 1))
|
|
524
|
+
// Sync sample table
|
|
525
|
+
]);
|
|
526
|
+
}, Zt = (e) => w("stsc", 0, 0, [
|
|
527
|
+
h(e.compactlyCodedChunkTable.length),
|
|
528
|
+
// Number of entries
|
|
529
|
+
e.compactlyCodedChunkTable.map((t) => [
|
|
530
|
+
// Sample-to-chunk table
|
|
531
|
+
h(t.firstChunk),
|
|
532
|
+
// First chunk
|
|
533
|
+
h(t.samplesPerChunk),
|
|
534
|
+
// Samples per chunk
|
|
535
|
+
h(1)
|
|
536
|
+
// Sample description index
|
|
537
|
+
])
|
|
538
|
+
]), Kt = (e) => w("stsz", 0, 0, [
|
|
539
|
+
h(0),
|
|
540
|
+
// Sample size (0 means non-constant size)
|
|
541
|
+
h(e.samples.length),
|
|
542
|
+
// Number of entries
|
|
543
|
+
e.samples.map((t) => h(t.size))
|
|
544
|
+
// Sample size table
|
|
545
|
+
]), Qt = (e) => e.finalizedChunks.length > 0 && me(e.finalizedChunks).offset >= 2 ** 32 ? w("co64", 0, 0, [
|
|
546
|
+
h(e.finalizedChunks.length),
|
|
547
|
+
// Number of entries
|
|
548
|
+
e.finalizedChunks.map((t) => j(t.offset))
|
|
549
|
+
// Chunk offset table
|
|
550
|
+
]) : w("stco", 0, 0, [
|
|
551
|
+
h(e.finalizedChunks.length),
|
|
552
|
+
// Number of entries
|
|
553
|
+
e.finalizedChunks.map((t) => h(t.offset))
|
|
554
|
+
// Chunk offset table
|
|
555
|
+
]), Yt = (e) => w("ctts", 0, 0, [
|
|
556
|
+
h(e.compositionTimeOffsetTable.length),
|
|
557
|
+
// Number of entries
|
|
558
|
+
e.compositionTimeOffsetTable.map((t) => [
|
|
559
|
+
// Time-to-sample table
|
|
560
|
+
h(t.sampleCount),
|
|
561
|
+
// Sample count
|
|
562
|
+
h(t.sampleCompositionTimeOffset)
|
|
563
|
+
// Sample offset
|
|
564
|
+
])
|
|
565
|
+
]), Jt = (e) => v("mvex", null, e.map(ei)), ei = (e) => w("trex", 0, 0, [
|
|
566
|
+
h(e.id),
|
|
567
|
+
// Track ID
|
|
568
|
+
h(1),
|
|
569
|
+
// Default sample description index
|
|
570
|
+
h(0),
|
|
571
|
+
// Default sample duration
|
|
572
|
+
h(0),
|
|
573
|
+
// Default sample size
|
|
574
|
+
h(0)
|
|
575
|
+
// Default sample flags
|
|
576
|
+
]), qe = (e, t) => v("moof", null, [
|
|
577
|
+
ti(e),
|
|
578
|
+
...t.map(ii)
|
|
579
|
+
]), ti = (e) => w("mfhd", 0, 0, [
|
|
580
|
+
h(e)
|
|
581
|
+
// Sequence number
|
|
582
|
+
]), Je = (e) => {
|
|
583
|
+
let t = 0, i = 0, r = 0, a = 0, n = e.type === "delta";
|
|
584
|
+
return i |= +n, n ? t |= 1 : t |= 2, t << 24 | i << 16 | r << 8 | a;
|
|
585
|
+
}, ii = (e) => v("traf", null, [
|
|
586
|
+
si(e),
|
|
587
|
+
ri(e),
|
|
588
|
+
ai(e)
|
|
589
|
+
]), si = (e) => {
|
|
590
|
+
let t = 0;
|
|
591
|
+
t |= 8, t |= 16, t |= 32, t |= 131072;
|
|
592
|
+
let i = e.currentChunk.samples[1] ?? e.currentChunk.samples[0], r = {
|
|
593
|
+
duration: i.timescaleUnitsToNextSample,
|
|
594
|
+
size: i.size,
|
|
595
|
+
flags: Je(i)
|
|
596
|
+
};
|
|
597
|
+
return w("tfhd", 0, t, [
|
|
598
|
+
h(e.id),
|
|
599
|
+
// Track ID
|
|
600
|
+
h(r.duration),
|
|
601
|
+
// Default sample duration
|
|
602
|
+
h(r.size),
|
|
603
|
+
// Default sample size
|
|
604
|
+
h(r.flags)
|
|
605
|
+
// Default sample flags
|
|
606
|
+
]);
|
|
607
|
+
}, ri = (e) => w("tfdt", 1, 0, [
|
|
608
|
+
j(x(e.currentChunk.startTimestamp, e.timescale))
|
|
609
|
+
// Base Media Decode Time
|
|
610
|
+
]), ai = (e) => {
|
|
611
|
+
let t = e.currentChunk.samples.map((B) => B.timescaleUnitsToNextSample), i = e.currentChunk.samples.map((B) => B.size), r = e.currentChunk.samples.map(Je), a = e.currentChunk.samples.map((B) => x(B.presentationTimestamp - B.decodeTimestamp, e.timescale)), n = new Set(t), o = new Set(i), l = new Set(r), p = new Set(a), y = l.size === 2 && r[0] !== r[1], _ = n.size > 1, W = o.size > 1, K = !y && l.size > 1, Q = p.size > 1 || [...p].some((B) => B !== 0), P = 0;
|
|
612
|
+
return P |= 1, P |= 4 * +y, P |= 256 * +_, P |= 512 * +W, P |= 1024 * +K, P |= 2048 * +Q, w("trun", 1, P, [
|
|
613
|
+
h(e.currentChunk.samples.length),
|
|
614
|
+
// Sample count
|
|
615
|
+
h(e.currentChunk.offset - e.currentChunk.moofOffset || 0),
|
|
616
|
+
// Data offset
|
|
617
|
+
y ? h(r[0]) : [],
|
|
618
|
+
e.currentChunk.samples.map((B, oe) => [
|
|
619
|
+
_ ? h(t[oe]) : [],
|
|
620
|
+
// Sample duration
|
|
621
|
+
W ? h(i[oe]) : [],
|
|
622
|
+
// Sample size
|
|
623
|
+
K ? h(r[oe]) : [],
|
|
624
|
+
// Sample flags
|
|
625
|
+
// Sample composition time offsets
|
|
626
|
+
Q ? yt(a[oe]) : []
|
|
627
|
+
])
|
|
628
|
+
]);
|
|
629
|
+
}, ni = (e) => v("mfra", null, [
|
|
630
|
+
...e.map(oi),
|
|
631
|
+
hi()
|
|
632
|
+
]), oi = (e, t) => w("tfra", 1, 0, [
|
|
633
|
+
h(e.id),
|
|
634
|
+
// Track ID
|
|
635
|
+
h(63),
|
|
636
|
+
// This specifies that traf number, trun number and sample number are 32-bit ints
|
|
637
|
+
h(e.finalizedChunks.length),
|
|
638
|
+
// Number of entries
|
|
639
|
+
e.finalizedChunks.map((r) => [
|
|
640
|
+
j(x(r.startTimestamp, e.timescale)),
|
|
641
|
+
// Time
|
|
642
|
+
j(r.moofOffset),
|
|
643
|
+
// moof offset
|
|
644
|
+
h(t + 1),
|
|
645
|
+
// traf number
|
|
646
|
+
h(1),
|
|
647
|
+
// trun number
|
|
648
|
+
h(1)
|
|
649
|
+
// Sample number
|
|
650
|
+
])
|
|
651
|
+
]), hi = () => w("mfro", 0, 0, [
|
|
652
|
+
// This value needs to be overwritten manually from the outside, where the actual size of the enclosing mfra box
|
|
653
|
+
// is known
|
|
654
|
+
h(0)
|
|
655
|
+
// Size
|
|
656
|
+
]), li = {
|
|
657
|
+
avc: "avc1",
|
|
658
|
+
hevc: "hvc1",
|
|
659
|
+
vp9: "vp09",
|
|
660
|
+
av1: "av01"
|
|
661
|
+
}, di = {
|
|
662
|
+
avc: Rt,
|
|
663
|
+
hevc: Vt,
|
|
664
|
+
vp9: Lt,
|
|
665
|
+
av1: $t
|
|
666
|
+
}, ui = {
|
|
667
|
+
aac: "mp4a",
|
|
668
|
+
opus: "Opus"
|
|
669
|
+
}, fi = {
|
|
670
|
+
aac: qt,
|
|
671
|
+
opus: Ht
|
|
672
|
+
}, ve = class {
|
|
673
|
+
}, et = class extends ve {
|
|
674
|
+
constructor() {
|
|
675
|
+
super(...arguments), this.buffer = null;
|
|
676
|
+
}
|
|
677
|
+
}, tt = class extends ve {
|
|
678
|
+
constructor(e) {
|
|
679
|
+
if (super(), this.options = e, typeof e != "object")
|
|
680
|
+
throw new TypeError("StreamTarget requires an options object to be passed to its constructor.");
|
|
681
|
+
if (e.onData) {
|
|
682
|
+
if (typeof e.onData != "function")
|
|
683
|
+
throw new TypeError("options.onData, when provided, must be a function.");
|
|
684
|
+
if (e.onData.length < 2)
|
|
685
|
+
throw new TypeError(
|
|
686
|
+
"options.onData, when provided, must be a function that takes in at least two arguments (data and position). Ignoring the position argument, which specifies the byte offset at which the data is to be written, can lead to broken outputs."
|
|
687
|
+
);
|
|
688
|
+
}
|
|
689
|
+
if (e.chunked !== void 0 && typeof e.chunked != "boolean")
|
|
690
|
+
throw new TypeError("options.chunked, when provided, must be a boolean.");
|
|
691
|
+
if (e.chunkSize !== void 0 && (!Number.isInteger(e.chunkSize) || e.chunkSize < 1024))
|
|
692
|
+
throw new TypeError("options.chunkSize, when provided, must be an integer and not smaller than 1024.");
|
|
693
|
+
}
|
|
694
|
+
}, mi = class extends ve {
|
|
695
|
+
constructor(e, t) {
|
|
696
|
+
if (super(), this.stream = e, this.options = t, !(e instanceof FileSystemWritableFileStream))
|
|
697
|
+
throw new TypeError("FileSystemWritableFileStreamTarget requires a FileSystemWritableFileStream instance.");
|
|
698
|
+
if (t !== void 0 && typeof t != "object")
|
|
699
|
+
throw new TypeError("FileSystemWritableFileStreamTarget's options, when provided, must be an object.");
|
|
700
|
+
if (t && t.chunkSize !== void 0 && (!Number.isInteger(t.chunkSize) || t.chunkSize <= 0))
|
|
701
|
+
throw new TypeError("options.chunkSize, when provided, must be a positive integer");
|
|
702
|
+
}
|
|
703
|
+
}, N, q, it = class {
|
|
704
|
+
constructor() {
|
|
705
|
+
this.pos = 0, f(this, N, new Uint8Array(8)), f(this, q, new DataView(s(this, N).buffer)), this.offsets = /* @__PURE__ */ new WeakMap();
|
|
706
|
+
}
|
|
707
|
+
/** Sets the current position for future writes to a new one. */
|
|
708
|
+
seek(e) {
|
|
709
|
+
this.pos = e;
|
|
710
|
+
}
|
|
711
|
+
writeU32(e) {
|
|
712
|
+
s(this, q).setUint32(0, e, !1), this.write(s(this, N).subarray(0, 4));
|
|
713
|
+
}
|
|
714
|
+
writeU64(e) {
|
|
715
|
+
s(this, q).setUint32(0, Math.floor(e / 2 ** 32), !1), s(this, q).setUint32(4, e, !1), this.write(s(this, N).subarray(0, 8));
|
|
716
|
+
}
|
|
717
|
+
writeAscii(e) {
|
|
718
|
+
for (let t = 0; t < e.length; t++)
|
|
719
|
+
s(this, q).setUint8(t % 8, e.charCodeAt(t)), t % 8 === 7 && this.write(s(this, N));
|
|
720
|
+
e.length % 8 !== 0 && this.write(s(this, N).subarray(0, e.length % 8));
|
|
721
|
+
}
|
|
722
|
+
writeBox(e) {
|
|
723
|
+
if (this.offsets.set(e, this.pos), e.contents && !e.children)
|
|
724
|
+
this.writeBoxHeader(e, e.size ?? e.contents.byteLength + 8), this.write(e.contents);
|
|
725
|
+
else {
|
|
726
|
+
let t = this.pos;
|
|
727
|
+
if (this.writeBoxHeader(e, 0), e.contents && this.write(e.contents), e.children)
|
|
728
|
+
for (let a of e.children)
|
|
729
|
+
a && this.writeBox(a);
|
|
730
|
+
let i = this.pos, r = e.size ?? i - t;
|
|
731
|
+
this.seek(t), this.writeBoxHeader(e, r), this.seek(i);
|
|
732
|
+
}
|
|
733
|
+
}
|
|
734
|
+
writeBoxHeader(e, t) {
|
|
735
|
+
this.writeU32(e.largeSize ? 1 : t), this.writeAscii(e.type), e.largeSize && this.writeU64(t);
|
|
736
|
+
}
|
|
737
|
+
measureBoxHeader(e) {
|
|
738
|
+
return 8 + (e.largeSize ? 8 : 0);
|
|
739
|
+
}
|
|
740
|
+
patchBox(e) {
|
|
741
|
+
let t = this.pos;
|
|
742
|
+
this.seek(this.offsets.get(e)), this.writeBox(e), this.seek(t);
|
|
743
|
+
}
|
|
744
|
+
measureBox(e) {
|
|
745
|
+
if (e.contents && !e.children)
|
|
746
|
+
return this.measureBoxHeader(e) + e.contents.byteLength;
|
|
747
|
+
{
|
|
748
|
+
let t = this.measureBoxHeader(e);
|
|
749
|
+
if (e.contents && (t += e.contents.byteLength), e.children)
|
|
750
|
+
for (let i of e.children)
|
|
751
|
+
i && (t += this.measureBox(i));
|
|
752
|
+
return t;
|
|
753
|
+
}
|
|
754
|
+
}
|
|
755
|
+
};
|
|
756
|
+
N = /* @__PURE__ */ new WeakMap();
|
|
757
|
+
q = /* @__PURE__ */ new WeakMap();
|
|
758
|
+
var le, $, ae, Y, de, _e, ci = class extends it {
|
|
759
|
+
constructor(e) {
|
|
760
|
+
super(), f(this, de), f(this, le, void 0), f(this, $, new ArrayBuffer(2 ** 16)), f(this, ae, new Uint8Array(s(this, $))), f(this, Y, 0), S(this, le, e);
|
|
761
|
+
}
|
|
762
|
+
write(e) {
|
|
763
|
+
m(this, de, _e).call(this, this.pos + e.byteLength), s(this, ae).set(e, this.pos), this.pos += e.byteLength, S(this, Y, Math.max(s(this, Y), this.pos));
|
|
764
|
+
}
|
|
765
|
+
finalize() {
|
|
766
|
+
m(this, de, _e).call(this, this.pos), s(this, le).buffer = s(this, $).slice(0, Math.max(s(this, Y), this.pos));
|
|
767
|
+
}
|
|
768
|
+
};
|
|
769
|
+
le = /* @__PURE__ */ new WeakMap();
|
|
770
|
+
$ = /* @__PURE__ */ new WeakMap();
|
|
771
|
+
ae = /* @__PURE__ */ new WeakMap();
|
|
772
|
+
Y = /* @__PURE__ */ new WeakMap();
|
|
773
|
+
de = /* @__PURE__ */ new WeakSet();
|
|
774
|
+
_e = function(e) {
|
|
775
|
+
let t = s(this, $).byteLength;
|
|
776
|
+
for (; t < e; )
|
|
777
|
+
t *= 2;
|
|
778
|
+
if (t === s(this, $).byteLength)
|
|
779
|
+
return;
|
|
780
|
+
let i = new ArrayBuffer(t), r = new Uint8Array(i);
|
|
781
|
+
r.set(s(this, ae), 0), S(this, $, i), S(this, ae, r);
|
|
782
|
+
};
|
|
783
|
+
var pi = 2 ** 24, gi = 2, te, R, J, U, A, ce, Ee, Fe, st, Ue, rt, ie, pe, We = class extends it {
|
|
784
|
+
constructor(e) {
|
|
785
|
+
var t, i;
|
|
786
|
+
super(), f(this, ce), f(this, Fe), f(this, Ue), f(this, ie), f(this, te, void 0), f(this, R, []), f(this, J, void 0), f(this, U, void 0), f(this, A, []), S(this, te, e), S(this, J, ((t = e.options) == null ? void 0 : t.chunked) ?? !1), S(this, U, ((i = e.options) == null ? void 0 : i.chunkSize) ?? pi);
|
|
787
|
+
}
|
|
788
|
+
write(e) {
|
|
789
|
+
s(this, R).push({
|
|
790
|
+
data: e.slice(),
|
|
791
|
+
start: this.pos
|
|
792
|
+
}), this.pos += e.byteLength;
|
|
793
|
+
}
|
|
794
|
+
flush() {
|
|
795
|
+
var i, r;
|
|
796
|
+
if (s(this, R).length === 0)
|
|
797
|
+
return;
|
|
798
|
+
let e = [], t = [...s(this, R)].sort((a, n) => a.start - n.start);
|
|
799
|
+
e.push({
|
|
800
|
+
start: t[0].start,
|
|
801
|
+
size: t[0].data.byteLength
|
|
802
|
+
});
|
|
803
|
+
for (let a = 1; a < t.length; a++) {
|
|
804
|
+
let n = e[e.length - 1], o = t[a];
|
|
805
|
+
o.start <= n.start + n.size ? n.size = Math.max(n.size, o.start + o.data.byteLength - n.start) : e.push({
|
|
806
|
+
start: o.start,
|
|
807
|
+
size: o.data.byteLength
|
|
808
|
+
});
|
|
809
|
+
}
|
|
810
|
+
for (let a of e) {
|
|
811
|
+
a.data = new Uint8Array(a.size);
|
|
812
|
+
for (let n of s(this, R))
|
|
813
|
+
a.start <= n.start && n.start < a.start + a.size && a.data.set(n.data, n.start - a.start);
|
|
814
|
+
s(this, J) ? (m(this, ce, Ee).call(this, a.data, a.start), m(this, ie, pe).call(this)) : (r = (i = s(this, te).options).onData) == null || r.call(i, a.data, a.start);
|
|
815
|
+
}
|
|
816
|
+
s(this, R).length = 0;
|
|
817
|
+
}
|
|
818
|
+
finalize() {
|
|
819
|
+
s(this, J) && m(this, ie, pe).call(this, !0);
|
|
820
|
+
}
|
|
821
|
+
};
|
|
822
|
+
te = /* @__PURE__ */ new WeakMap();
|
|
823
|
+
R = /* @__PURE__ */ new WeakMap();
|
|
824
|
+
J = /* @__PURE__ */ new WeakMap();
|
|
825
|
+
U = /* @__PURE__ */ new WeakMap();
|
|
826
|
+
A = /* @__PURE__ */ new WeakMap();
|
|
827
|
+
ce = /* @__PURE__ */ new WeakSet();
|
|
828
|
+
Ee = function(e, t) {
|
|
829
|
+
let i = s(this, A).findIndex((l) => l.start <= t && t < l.start + s(this, U));
|
|
830
|
+
i === -1 && (i = m(this, Ue, rt).call(this, t));
|
|
831
|
+
let r = s(this, A)[i], a = t - r.start, n = e.subarray(0, Math.min(s(this, U) - a, e.byteLength));
|
|
832
|
+
r.data.set(n, a);
|
|
833
|
+
let o = {
|
|
834
|
+
start: a,
|
|
835
|
+
end: a + n.byteLength
|
|
836
|
+
};
|
|
837
|
+
if (m(this, Fe, st).call(this, r, o), r.written[0].start === 0 && r.written[0].end === s(this, U) && (r.shouldFlush = !0), s(this, A).length > gi) {
|
|
838
|
+
for (let l = 0; l < s(this, A).length - 1; l++)
|
|
839
|
+
s(this, A)[l].shouldFlush = !0;
|
|
840
|
+
m(this, ie, pe).call(this);
|
|
841
|
+
}
|
|
842
|
+
n.byteLength < e.byteLength && m(this, ce, Ee).call(this, e.subarray(n.byteLength), t + n.byteLength);
|
|
843
|
+
};
|
|
844
|
+
Fe = /* @__PURE__ */ new WeakSet();
|
|
845
|
+
st = function(e, t) {
|
|
846
|
+
let i = 0, r = e.written.length - 1, a = -1;
|
|
847
|
+
for (; i <= r; ) {
|
|
848
|
+
let n = Math.floor(i + (r - i + 1) / 2);
|
|
849
|
+
e.written[n].start <= t.start ? (i = n + 1, a = n) : r = n - 1;
|
|
850
|
+
}
|
|
851
|
+
for (e.written.splice(a + 1, 0, t), (a === -1 || e.written[a].end < t.start) && a++; a < e.written.length - 1 && e.written[a].end >= e.written[a + 1].start; )
|
|
852
|
+
e.written[a].end = Math.max(e.written[a].end, e.written[a + 1].end), e.written.splice(a + 1, 1);
|
|
853
|
+
};
|
|
854
|
+
Ue = /* @__PURE__ */ new WeakSet();
|
|
855
|
+
rt = function(e) {
|
|
856
|
+
let i = {
|
|
857
|
+
start: Math.floor(e / s(this, U)) * s(this, U),
|
|
858
|
+
data: new Uint8Array(s(this, U)),
|
|
859
|
+
written: [],
|
|
860
|
+
shouldFlush: !1
|
|
861
|
+
};
|
|
862
|
+
return s(this, A).push(i), s(this, A).sort((r, a) => r.start - a.start), s(this, A).indexOf(i);
|
|
863
|
+
};
|
|
864
|
+
ie = /* @__PURE__ */ new WeakSet();
|
|
865
|
+
pe = function(e = !1) {
|
|
866
|
+
var t, i;
|
|
867
|
+
for (let r = 0; r < s(this, A).length; r++) {
|
|
868
|
+
let a = s(this, A)[r];
|
|
869
|
+
if (!(!a.shouldFlush && !e)) {
|
|
870
|
+
for (let n of a.written)
|
|
871
|
+
(i = (t = s(this, te).options).onData) == null || i.call(
|
|
872
|
+
t,
|
|
873
|
+
a.data.subarray(n.start, n.end),
|
|
874
|
+
a.start + n.start
|
|
875
|
+
);
|
|
876
|
+
s(this, A).splice(r--, 1);
|
|
877
|
+
}
|
|
878
|
+
}
|
|
879
|
+
};
|
|
880
|
+
var wi = class extends We {
|
|
881
|
+
constructor(e) {
|
|
882
|
+
var t;
|
|
883
|
+
super(new tt({
|
|
884
|
+
onData: (i, r) => e.stream.write({
|
|
885
|
+
type: "write",
|
|
886
|
+
data: i,
|
|
887
|
+
position: r
|
|
888
|
+
}),
|
|
889
|
+
chunked: !0,
|
|
890
|
+
chunkSize: (t = e.options) == null ? void 0 : t.chunkSize
|
|
891
|
+
}));
|
|
892
|
+
}
|
|
893
|
+
}, ze = 1e3, yi = ["avc", "hevc", "vp9", "av1"], vi = ["aac", "opus"], Ci = 2082844800, Ti = ["strict", "offset", "cross-track-offset"], d, u, ge, z, b, T, H, G, Ne, V, L, se, Ae, at, Me, nt, Re, ot, Pe, ht, Ve, lt, ue, De, D, F, Le, dt, re, we, ye, $e, Z, ne, fe, Ie, Si = class {
|
|
92
894
|
constructor(e) {
|
|
93
|
-
this
|
|
895
|
+
if (f(this, Ae), f(this, Me), f(this, Re), f(this, Pe), f(this, Ve), f(this, ue), f(this, D), f(this, Le), f(this, re), f(this, ye), f(this, Z), f(this, fe), f(this, d, void 0), f(this, u, void 0), f(this, ge, void 0), f(this, z, void 0), f(this, b, null), f(this, T, null), f(this, H, Math.floor(Date.now() / 1e3) + Ci), f(this, G, []), f(this, Ne, 1), f(this, V, []), f(this, L, []), f(this, se, !1), m(this, Ae, at).call(this, e), e.video = ee(e.video), e.audio = ee(e.audio), e.fastStart = ee(e.fastStart), this.target = e.target, S(this, d, {
|
|
896
|
+
firstTimestampBehavior: "strict",
|
|
897
|
+
...e
|
|
898
|
+
}), e.target instanceof et)
|
|
899
|
+
S(this, u, new ci(e.target));
|
|
900
|
+
else if (e.target instanceof tt)
|
|
901
|
+
S(this, u, new We(e.target));
|
|
902
|
+
else if (e.target instanceof mi)
|
|
903
|
+
S(this, u, new wi(e.target));
|
|
904
|
+
else
|
|
905
|
+
throw new Error(`Invalid target: ${e.target}`);
|
|
906
|
+
m(this, Pe, ht).call(this), m(this, Me, nt).call(this);
|
|
907
|
+
}
|
|
908
|
+
addVideoChunk(e, t, i, r) {
|
|
909
|
+
if (!(e instanceof EncodedVideoChunk))
|
|
910
|
+
throw new TypeError("addVideoChunk's first argument (sample) must be of type EncodedVideoChunk.");
|
|
911
|
+
if (t && typeof t != "object")
|
|
912
|
+
throw new TypeError("addVideoChunk's second argument (meta), when provided, must be an object.");
|
|
913
|
+
if (i !== void 0 && (!Number.isFinite(i) || i < 0))
|
|
914
|
+
throw new TypeError(
|
|
915
|
+
"addVideoChunk's third argument (timestamp), when provided, must be a non-negative real number."
|
|
916
|
+
);
|
|
917
|
+
if (r !== void 0 && !Number.isFinite(r))
|
|
918
|
+
throw new TypeError(
|
|
919
|
+
"addVideoChunk's fourth argument (compositionTimeOffset), when provided, must be a real number."
|
|
920
|
+
);
|
|
921
|
+
let a = new Uint8Array(e.byteLength);
|
|
922
|
+
e.copyTo(a), this.addVideoChunkRaw(
|
|
923
|
+
a,
|
|
924
|
+
e.type,
|
|
925
|
+
i ?? e.timestamp,
|
|
926
|
+
e.duration,
|
|
927
|
+
t,
|
|
928
|
+
r
|
|
929
|
+
);
|
|
930
|
+
}
|
|
931
|
+
addVideoChunkRaw(e, t, i, r, a, n) {
|
|
932
|
+
if (!(e instanceof Uint8Array))
|
|
933
|
+
throw new TypeError("addVideoChunkRaw's first argument (data) must be an instance of Uint8Array.");
|
|
934
|
+
if (t !== "key" && t !== "delta")
|
|
935
|
+
throw new TypeError("addVideoChunkRaw's second argument (type) must be either 'key' or 'delta'.");
|
|
936
|
+
if (!Number.isFinite(i) || i < 0)
|
|
937
|
+
throw new TypeError("addVideoChunkRaw's third argument (timestamp) must be a non-negative real number.");
|
|
938
|
+
if (!Number.isFinite(r) || r < 0)
|
|
939
|
+
throw new TypeError("addVideoChunkRaw's fourth argument (duration) must be a non-negative real number.");
|
|
940
|
+
if (a && typeof a != "object")
|
|
941
|
+
throw new TypeError("addVideoChunkRaw's fifth argument (meta), when provided, must be an object.");
|
|
942
|
+
if (n !== void 0 && !Number.isFinite(n))
|
|
943
|
+
throw new TypeError(
|
|
944
|
+
"addVideoChunkRaw's sixth argument (compositionTimeOffset), when provided, must be a real number."
|
|
945
|
+
);
|
|
946
|
+
if (m(this, fe, Ie).call(this), !s(this, d).video)
|
|
947
|
+
throw new Error("No video track declared.");
|
|
948
|
+
if (typeof s(this, d).fastStart == "object" && s(this, b).samples.length === s(this, d).fastStart.expectedVideoChunks)
|
|
949
|
+
throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${s(this, d).fastStart.expectedVideoChunks}).`);
|
|
950
|
+
let o = m(this, ue, De).call(this, s(this, b), e, t, i, r, a, n);
|
|
951
|
+
if (s(this, d).fastStart === "fragmented" && s(this, T)) {
|
|
952
|
+
for (; s(this, L).length > 0 && s(this, L)[0].decodeTimestamp <= o.decodeTimestamp; ) {
|
|
953
|
+
let l = s(this, L).shift();
|
|
954
|
+
m(this, D, F).call(this, s(this, T), l);
|
|
955
|
+
}
|
|
956
|
+
o.decodeTimestamp <= s(this, T).lastDecodeTimestamp ? m(this, D, F).call(this, s(this, b), o) : s(this, V).push(o);
|
|
957
|
+
} else
|
|
958
|
+
m(this, D, F).call(this, s(this, b), o);
|
|
959
|
+
}
|
|
960
|
+
addAudioChunk(e, t, i) {
|
|
961
|
+
if (!(e instanceof EncodedAudioChunk))
|
|
962
|
+
throw new TypeError("addAudioChunk's first argument (sample) must be of type EncodedAudioChunk.");
|
|
963
|
+
if (t && typeof t != "object")
|
|
964
|
+
throw new TypeError("addAudioChunk's second argument (meta), when provided, must be an object.");
|
|
965
|
+
if (i !== void 0 && (!Number.isFinite(i) || i < 0))
|
|
966
|
+
throw new TypeError(
|
|
967
|
+
"addAudioChunk's third argument (timestamp), when provided, must be a non-negative real number."
|
|
968
|
+
);
|
|
969
|
+
let r = new Uint8Array(e.byteLength);
|
|
970
|
+
e.copyTo(r), this.addAudioChunkRaw(r, e.type, i ?? e.timestamp, e.duration, t);
|
|
971
|
+
}
|
|
972
|
+
addAudioChunkRaw(e, t, i, r, a) {
|
|
973
|
+
if (!(e instanceof Uint8Array))
|
|
974
|
+
throw new TypeError("addAudioChunkRaw's first argument (data) must be an instance of Uint8Array.");
|
|
975
|
+
if (t !== "key" && t !== "delta")
|
|
976
|
+
throw new TypeError("addAudioChunkRaw's second argument (type) must be either 'key' or 'delta'.");
|
|
977
|
+
if (!Number.isFinite(i) || i < 0)
|
|
978
|
+
throw new TypeError("addAudioChunkRaw's third argument (timestamp) must be a non-negative real number.");
|
|
979
|
+
if (!Number.isFinite(r) || r < 0)
|
|
980
|
+
throw new TypeError("addAudioChunkRaw's fourth argument (duration) must be a non-negative real number.");
|
|
981
|
+
if (a && typeof a != "object")
|
|
982
|
+
throw new TypeError("addAudioChunkRaw's fifth argument (meta), when provided, must be an object.");
|
|
983
|
+
if (m(this, fe, Ie).call(this), !s(this, d).audio)
|
|
984
|
+
throw new Error("No audio track declared.");
|
|
985
|
+
if (typeof s(this, d).fastStart == "object" && s(this, T).samples.length === s(this, d).fastStart.expectedAudioChunks)
|
|
986
|
+
throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${s(this, d).fastStart.expectedAudioChunks}).`);
|
|
987
|
+
let n = m(this, ue, De).call(this, s(this, T), e, t, i, r, a);
|
|
988
|
+
if (s(this, d).fastStart === "fragmented" && s(this, b)) {
|
|
989
|
+
for (; s(this, V).length > 0 && s(this, V)[0].decodeTimestamp <= n.decodeTimestamp; ) {
|
|
990
|
+
let o = s(this, V).shift();
|
|
991
|
+
m(this, D, F).call(this, s(this, b), o);
|
|
992
|
+
}
|
|
993
|
+
n.decodeTimestamp <= s(this, b).lastDecodeTimestamp ? m(this, D, F).call(this, s(this, T), n) : s(this, L).push(n);
|
|
994
|
+
} else
|
|
995
|
+
m(this, D, F).call(this, s(this, T), n);
|
|
996
|
+
}
|
|
997
|
+
/** Finalizes the file, making it ready for use. Must be called after all video and audio chunks have been added. */
|
|
998
|
+
finalize() {
|
|
999
|
+
if (s(this, se))
|
|
1000
|
+
throw new Error("Cannot finalize a muxer more than once.");
|
|
1001
|
+
if (s(this, d).fastStart === "fragmented") {
|
|
1002
|
+
for (let t of s(this, V))
|
|
1003
|
+
m(this, D, F).call(this, s(this, b), t);
|
|
1004
|
+
for (let t of s(this, L))
|
|
1005
|
+
m(this, D, F).call(this, s(this, T), t);
|
|
1006
|
+
m(this, ye, $e).call(this, !1);
|
|
1007
|
+
} else
|
|
1008
|
+
s(this, b) && m(this, re, we).call(this, s(this, b)), s(this, T) && m(this, re, we).call(this, s(this, T));
|
|
1009
|
+
let e = [s(this, b), s(this, T)].filter(Boolean);
|
|
1010
|
+
if (s(this, d).fastStart === "in-memory") {
|
|
1011
|
+
let t;
|
|
1012
|
+
for (let r = 0; r < 2; r++) {
|
|
1013
|
+
let a = he(e, s(this, H)), n = s(this, u).measureBox(a);
|
|
1014
|
+
t = s(this, u).measureBox(s(this, z));
|
|
1015
|
+
let o = s(this, u).pos + n + t;
|
|
1016
|
+
for (let l of s(this, G)) {
|
|
1017
|
+
l.offset = o;
|
|
1018
|
+
for (let { data: p } of l.samples)
|
|
1019
|
+
o += p.byteLength, t += p.byteLength;
|
|
1020
|
+
}
|
|
1021
|
+
if (o < 2 ** 32)
|
|
1022
|
+
break;
|
|
1023
|
+
t >= 2 ** 32 && (s(this, z).largeSize = !0);
|
|
1024
|
+
}
|
|
1025
|
+
let i = he(e, s(this, H));
|
|
1026
|
+
s(this, u).writeBox(i), s(this, z).size = t, s(this, u).writeBox(s(this, z));
|
|
1027
|
+
for (let r of s(this, G))
|
|
1028
|
+
for (let a of r.samples)
|
|
1029
|
+
s(this, u).write(a.data), a.data = null;
|
|
1030
|
+
} else if (s(this, d).fastStart === "fragmented") {
|
|
1031
|
+
let t = s(this, u).pos, i = ni(e);
|
|
1032
|
+
s(this, u).writeBox(i);
|
|
1033
|
+
let r = s(this, u).pos - t;
|
|
1034
|
+
s(this, u).seek(s(this, u).pos - 4), s(this, u).writeU32(r);
|
|
1035
|
+
} else {
|
|
1036
|
+
let t = s(this, u).offsets.get(s(this, z)), i = s(this, u).pos - t;
|
|
1037
|
+
s(this, z).size = i, s(this, z).largeSize = i >= 2 ** 32, s(this, u).patchBox(s(this, z));
|
|
1038
|
+
let r = he(e, s(this, H));
|
|
1039
|
+
if (typeof s(this, d).fastStart == "object") {
|
|
1040
|
+
s(this, u).seek(s(this, ge)), s(this, u).writeBox(r);
|
|
1041
|
+
let a = t - s(this, u).pos;
|
|
1042
|
+
s(this, u).writeBox(Ct(a));
|
|
1043
|
+
} else
|
|
1044
|
+
s(this, u).writeBox(r);
|
|
1045
|
+
}
|
|
1046
|
+
m(this, Z, ne).call(this), s(this, u).finalize(), S(this, se, !0);
|
|
1047
|
+
}
|
|
1048
|
+
};
|
|
1049
|
+
d = /* @__PURE__ */ new WeakMap();
|
|
1050
|
+
u = /* @__PURE__ */ new WeakMap();
|
|
1051
|
+
ge = /* @__PURE__ */ new WeakMap();
|
|
1052
|
+
z = /* @__PURE__ */ new WeakMap();
|
|
1053
|
+
b = /* @__PURE__ */ new WeakMap();
|
|
1054
|
+
T = /* @__PURE__ */ new WeakMap();
|
|
1055
|
+
H = /* @__PURE__ */ new WeakMap();
|
|
1056
|
+
G = /* @__PURE__ */ new WeakMap();
|
|
1057
|
+
Ne = /* @__PURE__ */ new WeakMap();
|
|
1058
|
+
V = /* @__PURE__ */ new WeakMap();
|
|
1059
|
+
L = /* @__PURE__ */ new WeakMap();
|
|
1060
|
+
se = /* @__PURE__ */ new WeakMap();
|
|
1061
|
+
Ae = /* @__PURE__ */ new WeakSet();
|
|
1062
|
+
at = function(e) {
|
|
1063
|
+
if (typeof e != "object")
|
|
1064
|
+
throw new TypeError("The muxer requires an options object to be passed to its constructor.");
|
|
1065
|
+
if (!(e.target instanceof ve))
|
|
1066
|
+
throw new TypeError("The target must be provided and an instance of Target.");
|
|
1067
|
+
if (e.video) {
|
|
1068
|
+
if (!yi.includes(e.video.codec))
|
|
1069
|
+
throw new TypeError(`Unsupported video codec: ${e.video.codec}`);
|
|
1070
|
+
if (!Number.isInteger(e.video.width) || e.video.width <= 0)
|
|
1071
|
+
throw new TypeError(`Invalid video width: ${e.video.width}. Must be a positive integer.`);
|
|
1072
|
+
if (!Number.isInteger(e.video.height) || e.video.height <= 0)
|
|
1073
|
+
throw new TypeError(`Invalid video height: ${e.video.height}. Must be a positive integer.`);
|
|
1074
|
+
const t = e.video.rotation;
|
|
1075
|
+
if (typeof t == "number" && ![0, 90, 180, 270].includes(t))
|
|
1076
|
+
throw new TypeError(`Invalid video rotation: ${t}. Has to be 0, 90, 180 or 270.`);
|
|
1077
|
+
if (Array.isArray(t) && (t.length !== 9 || t.some((i) => typeof i != "number")))
|
|
1078
|
+
throw new TypeError(`Invalid video transformation matrix: ${t.join()}`);
|
|
1079
|
+
if (e.video.frameRate !== void 0 && (!Number.isInteger(e.video.frameRate) || e.video.frameRate <= 0))
|
|
1080
|
+
throw new TypeError(
|
|
1081
|
+
`Invalid video frame rate: ${e.video.frameRate}. Must be a positive integer.`
|
|
1082
|
+
);
|
|
1083
|
+
}
|
|
1084
|
+
if (e.audio) {
|
|
1085
|
+
if (!vi.includes(e.audio.codec))
|
|
1086
|
+
throw new TypeError(`Unsupported audio codec: ${e.audio.codec}`);
|
|
1087
|
+
if (!Number.isInteger(e.audio.numberOfChannels) || e.audio.numberOfChannels <= 0)
|
|
1088
|
+
throw new TypeError(
|
|
1089
|
+
`Invalid number of audio channels: ${e.audio.numberOfChannels}. Must be a positive integer.`
|
|
1090
|
+
);
|
|
1091
|
+
if (!Number.isInteger(e.audio.sampleRate) || e.audio.sampleRate <= 0)
|
|
1092
|
+
throw new TypeError(
|
|
1093
|
+
`Invalid audio sample rate: ${e.audio.sampleRate}. Must be a positive integer.`
|
|
1094
|
+
);
|
|
1095
|
+
}
|
|
1096
|
+
if (e.firstTimestampBehavior && !Ti.includes(e.firstTimestampBehavior))
|
|
1097
|
+
throw new TypeError(`Invalid first timestamp behavior: ${e.firstTimestampBehavior}`);
|
|
1098
|
+
if (typeof e.fastStart == "object") {
|
|
1099
|
+
if (e.video) {
|
|
1100
|
+
if (e.fastStart.expectedVideoChunks === void 0)
|
|
1101
|
+
throw new TypeError("'fastStart' is an object but is missing property 'expectedVideoChunks'.");
|
|
1102
|
+
if (!Number.isInteger(e.fastStart.expectedVideoChunks) || e.fastStart.expectedVideoChunks < 0)
|
|
1103
|
+
throw new TypeError("'expectedVideoChunks' must be a non-negative integer.");
|
|
1104
|
+
}
|
|
1105
|
+
if (e.audio) {
|
|
1106
|
+
if (e.fastStart.expectedAudioChunks === void 0)
|
|
1107
|
+
throw new TypeError("'fastStart' is an object but is missing property 'expectedAudioChunks'.");
|
|
1108
|
+
if (!Number.isInteger(e.fastStart.expectedAudioChunks) || e.fastStart.expectedAudioChunks < 0)
|
|
1109
|
+
throw new TypeError("'expectedAudioChunks' must be a non-negative integer.");
|
|
1110
|
+
}
|
|
1111
|
+
} else if (![!1, "in-memory", "fragmented"].includes(e.fastStart))
|
|
1112
|
+
throw new TypeError("'fastStart' option must be false, 'in-memory', 'fragmented' or an object.");
|
|
1113
|
+
if (e.minFragmentDuration !== void 0 && (!Number.isFinite(e.minFragmentDuration) || e.minFragmentDuration < 0))
|
|
1114
|
+
throw new TypeError("'minFragmentDuration' must be a non-negative number.");
|
|
1115
|
+
};
|
|
1116
|
+
Me = /* @__PURE__ */ new WeakSet();
|
|
1117
|
+
nt = function() {
|
|
1118
|
+
var e;
|
|
1119
|
+
if (s(this, u).writeBox(vt({
|
|
1120
|
+
holdsAvc: ((e = s(this, d).video) == null ? void 0 : e.codec) === "avc",
|
|
1121
|
+
fragmented: s(this, d).fastStart === "fragmented"
|
|
1122
|
+
})), S(this, ge, s(this, u).pos), s(this, d).fastStart === "in-memory")
|
|
1123
|
+
S(this, z, be(!1));
|
|
1124
|
+
else if (s(this, d).fastStart !== "fragmented") {
|
|
1125
|
+
if (typeof s(this, d).fastStart == "object") {
|
|
1126
|
+
let t = m(this, Re, ot).call(this);
|
|
1127
|
+
s(this, u).seek(s(this, u).pos + t);
|
|
1128
|
+
}
|
|
1129
|
+
S(this, z, be(!0)), s(this, u).writeBox(s(this, z));
|
|
1130
|
+
}
|
|
1131
|
+
m(this, Z, ne).call(this);
|
|
1132
|
+
};
|
|
1133
|
+
Re = /* @__PURE__ */ new WeakSet();
|
|
1134
|
+
ot = function() {
|
|
1135
|
+
if (typeof s(this, d).fastStart != "object")
|
|
1136
|
+
return;
|
|
1137
|
+
let e = 0, t = [
|
|
1138
|
+
s(this, d).fastStart.expectedVideoChunks,
|
|
1139
|
+
s(this, d).fastStart.expectedAudioChunks
|
|
1140
|
+
];
|
|
1141
|
+
for (let i of t)
|
|
1142
|
+
i && (e += 8 * Math.ceil(2 / 3 * i), e += 4 * i, e += 12 * Math.ceil(2 / 3 * i), e += 4 * i, e += 8 * i);
|
|
1143
|
+
return e += 4096, e;
|
|
1144
|
+
};
|
|
1145
|
+
Pe = /* @__PURE__ */ new WeakSet();
|
|
1146
|
+
ht = function() {
|
|
1147
|
+
if (s(this, d).video && S(this, b, {
|
|
1148
|
+
id: 1,
|
|
1149
|
+
info: {
|
|
1150
|
+
type: "video",
|
|
1151
|
+
codec: s(this, d).video.codec,
|
|
1152
|
+
width: s(this, d).video.width,
|
|
1153
|
+
height: s(this, d).video.height,
|
|
1154
|
+
rotation: s(this, d).video.rotation ?? 0,
|
|
1155
|
+
decoderConfig: null
|
|
1156
|
+
},
|
|
1157
|
+
// The fallback contains many common frame rates as factors
|
|
1158
|
+
timescale: s(this, d).video.frameRate ?? 57600,
|
|
1159
|
+
samples: [],
|
|
1160
|
+
finalizedChunks: [],
|
|
1161
|
+
currentChunk: null,
|
|
1162
|
+
firstDecodeTimestamp: void 0,
|
|
1163
|
+
lastDecodeTimestamp: -1,
|
|
1164
|
+
timeToSampleTable: [],
|
|
1165
|
+
compositionTimeOffsetTable: [],
|
|
1166
|
+
lastTimescaleUnits: null,
|
|
1167
|
+
lastSample: null,
|
|
1168
|
+
compactlyCodedChunkTable: []
|
|
1169
|
+
}), s(this, d).audio && (S(this, T, {
|
|
1170
|
+
id: s(this, d).video ? 2 : 1,
|
|
1171
|
+
info: {
|
|
1172
|
+
type: "audio",
|
|
1173
|
+
codec: s(this, d).audio.codec,
|
|
1174
|
+
numberOfChannels: s(this, d).audio.numberOfChannels,
|
|
1175
|
+
sampleRate: s(this, d).audio.sampleRate,
|
|
1176
|
+
decoderConfig: null
|
|
1177
|
+
},
|
|
1178
|
+
timescale: s(this, d).audio.sampleRate,
|
|
1179
|
+
samples: [],
|
|
1180
|
+
finalizedChunks: [],
|
|
1181
|
+
currentChunk: null,
|
|
1182
|
+
firstDecodeTimestamp: void 0,
|
|
1183
|
+
lastDecodeTimestamp: -1,
|
|
1184
|
+
timeToSampleTable: [],
|
|
1185
|
+
compositionTimeOffsetTable: [],
|
|
1186
|
+
lastTimescaleUnits: null,
|
|
1187
|
+
lastSample: null,
|
|
1188
|
+
compactlyCodedChunkTable: []
|
|
1189
|
+
}), s(this, d).audio.codec === "aac")) {
|
|
1190
|
+
let e = m(this, Ve, lt).call(
|
|
1191
|
+
this,
|
|
1192
|
+
2,
|
|
1193
|
+
// Object type for AAC-LC, since it's the most common
|
|
1194
|
+
s(this, d).audio.sampleRate,
|
|
1195
|
+
s(this, d).audio.numberOfChannels
|
|
1196
|
+
);
|
|
1197
|
+
s(this, T).info.decoderConfig = {
|
|
1198
|
+
codec: s(this, d).audio.codec,
|
|
1199
|
+
description: e,
|
|
1200
|
+
numberOfChannels: s(this, d).audio.numberOfChannels,
|
|
1201
|
+
sampleRate: s(this, d).audio.sampleRate
|
|
1202
|
+
};
|
|
1203
|
+
}
|
|
1204
|
+
};
|
|
1205
|
+
Ve = /* @__PURE__ */ new WeakSet();
|
|
1206
|
+
lt = function(e, t, i) {
|
|
1207
|
+
let a = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350].indexOf(t), n = i, o = "";
|
|
1208
|
+
o += e.toString(2).padStart(5, "0"), o += a.toString(2).padStart(4, "0"), a === 15 && (o += t.toString(2).padStart(24, "0")), o += n.toString(2).padStart(4, "0");
|
|
1209
|
+
let l = Math.ceil(o.length / 8) * 8;
|
|
1210
|
+
o = o.padEnd(l, "0");
|
|
1211
|
+
let p = new Uint8Array(o.length / 8);
|
|
1212
|
+
for (let y = 0; y < o.length; y += 8)
|
|
1213
|
+
p[y / 8] = parseInt(o.slice(y, y + 8), 2);
|
|
1214
|
+
return p;
|
|
1215
|
+
};
|
|
1216
|
+
ue = /* @__PURE__ */ new WeakSet();
|
|
1217
|
+
De = function(e, t, i, r, a, n, o) {
|
|
1218
|
+
let l = r / 1e6, p = (r - (o ?? 0)) / 1e6, y = a / 1e6, _ = m(this, Le, dt).call(this, l, p, e);
|
|
1219
|
+
return l = _.presentationTimestamp, p = _.decodeTimestamp, n != null && n.decoderConfig && (e.info.decoderConfig === null ? e.info.decoderConfig = n.decoderConfig : Object.assign(e.info.decoderConfig, n.decoderConfig)), {
|
|
1220
|
+
presentationTimestamp: l,
|
|
1221
|
+
decodeTimestamp: p,
|
|
1222
|
+
duration: y,
|
|
1223
|
+
data: t,
|
|
1224
|
+
size: t.byteLength,
|
|
1225
|
+
type: i,
|
|
1226
|
+
// Will be refined once the next sample comes in
|
|
1227
|
+
timescaleUnitsToNextSample: x(y, e.timescale)
|
|
1228
|
+
};
|
|
1229
|
+
};
|
|
1230
|
+
D = /* @__PURE__ */ new WeakSet();
|
|
1231
|
+
F = function(e, t) {
|
|
1232
|
+
s(this, d).fastStart !== "fragmented" && e.samples.push(t);
|
|
1233
|
+
const i = x(t.presentationTimestamp - t.decodeTimestamp, e.timescale);
|
|
1234
|
+
if (e.lastTimescaleUnits !== null) {
|
|
1235
|
+
let a = x(t.decodeTimestamp, e.timescale, !1), n = Math.round(a - e.lastTimescaleUnits);
|
|
1236
|
+
if (e.lastTimescaleUnits += n, e.lastSample.timescaleUnitsToNextSample = n, s(this, d).fastStart !== "fragmented") {
|
|
1237
|
+
let o = me(e.timeToSampleTable);
|
|
1238
|
+
o.sampleCount === 1 ? (o.sampleDelta = n, o.sampleCount++) : o.sampleDelta === n ? o.sampleCount++ : (o.sampleCount--, e.timeToSampleTable.push({
|
|
1239
|
+
sampleCount: 2,
|
|
1240
|
+
sampleDelta: n
|
|
1241
|
+
}));
|
|
1242
|
+
const l = me(e.compositionTimeOffsetTable);
|
|
1243
|
+
l.sampleCompositionTimeOffset === i ? l.sampleCount++ : e.compositionTimeOffsetTable.push({
|
|
1244
|
+
sampleCount: 1,
|
|
1245
|
+
sampleCompositionTimeOffset: i
|
|
1246
|
+
});
|
|
1247
|
+
}
|
|
1248
|
+
} else
|
|
1249
|
+
e.lastTimescaleUnits = 0, s(this, d).fastStart !== "fragmented" && (e.timeToSampleTable.push({
|
|
1250
|
+
sampleCount: 1,
|
|
1251
|
+
sampleDelta: x(t.duration, e.timescale)
|
|
1252
|
+
}), e.compositionTimeOffsetTable.push({
|
|
1253
|
+
sampleCount: 1,
|
|
1254
|
+
sampleCompositionTimeOffset: i
|
|
1255
|
+
}));
|
|
1256
|
+
e.lastSample = t;
|
|
1257
|
+
let r = !1;
|
|
1258
|
+
if (!e.currentChunk)
|
|
1259
|
+
r = !0;
|
|
1260
|
+
else {
|
|
1261
|
+
let a = t.presentationTimestamp - e.currentChunk.startTimestamp;
|
|
1262
|
+
if (s(this, d).fastStart === "fragmented") {
|
|
1263
|
+
let n = s(this, b) ?? s(this, T);
|
|
1264
|
+
const o = s(this, d).minFragmentDuration ?? 1;
|
|
1265
|
+
e === n && t.type === "key" && a >= o && (r = !0, m(this, ye, $e).call(this));
|
|
1266
|
+
} else
|
|
1267
|
+
r = a >= 0.5;
|
|
1268
|
+
}
|
|
1269
|
+
r && (e.currentChunk && m(this, re, we).call(this, e), e.currentChunk = {
|
|
1270
|
+
startTimestamp: t.presentationTimestamp,
|
|
1271
|
+
samples: []
|
|
1272
|
+
}), e.currentChunk.samples.push(t);
|
|
1273
|
+
};
|
|
1274
|
+
Le = /* @__PURE__ */ new WeakSet();
|
|
1275
|
+
dt = function(e, t, i) {
|
|
1276
|
+
var o, l;
|
|
1277
|
+
const r = s(this, d).firstTimestampBehavior === "strict", a = i.lastDecodeTimestamp === -1;
|
|
1278
|
+
if (r && a && t !== 0)
|
|
1279
|
+
throw new Error(
|
|
1280
|
+
`The first chunk for your media track must have a timestamp of 0 (received DTS=${t}). Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the document, which is probably not what you want.
|
|
1281
|
+
|
|
1282
|
+
If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
|
|
1283
|
+
`
|
|
1284
|
+
);
|
|
1285
|
+
if (s(this, d).firstTimestampBehavior === "offset" || s(this, d).firstTimestampBehavior === "cross-track-offset") {
|
|
1286
|
+
i.firstDecodeTimestamp === void 0 && (i.firstDecodeTimestamp = t);
|
|
1287
|
+
let p;
|
|
1288
|
+
s(this, d).firstTimestampBehavior === "offset" ? p = i.firstDecodeTimestamp : p = Math.min(
|
|
1289
|
+
((o = s(this, b)) == null ? void 0 : o.firstDecodeTimestamp) ?? 1 / 0,
|
|
1290
|
+
((l = s(this, T)) == null ? void 0 : l.firstDecodeTimestamp) ?? 1 / 0
|
|
1291
|
+
), t -= p, e -= p;
|
|
1292
|
+
}
|
|
1293
|
+
if (t < i.lastDecodeTimestamp)
|
|
1294
|
+
throw new Error(
|
|
1295
|
+
`Timestamps must be monotonically increasing (DTS went from ${i.lastDecodeTimestamp * 1e6} to ${t * 1e6}).`
|
|
1296
|
+
);
|
|
1297
|
+
return i.lastDecodeTimestamp = t, { presentationTimestamp: e, decodeTimestamp: t };
|
|
1298
|
+
};
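The private method above applies the muxer's firstTimestampBehavior policy before timestamps are converted to timescale units: 'strict' rejects a non-zero first DTS, 'offset' shifts the track so its first DTS becomes 0, and the DTS must never decrease afterwards. A minimal sketch of that policy for the two simpler modes (names are illustrative; 'cross-track-offset' additionally takes the minimum first DTS across both tracks):

```js
// 'strict': the first DTS must be 0; 'offset': shift the track so its first DTS becomes 0.
function normalizeTimestamps(pts, dts, track, behavior) {
  const isFirstSample = track.lastDecodeTimestamp === -1;
  if (behavior === "strict" && isFirstSample && dts !== 0) {
    throw new Error("The first chunk must have a timestamp of 0");
  }
  if (behavior === "offset") {
    if (track.firstDecodeTimestamp === undefined) track.firstDecodeTimestamp = dts;
    pts -= track.firstDecodeTimestamp;
    dts -= track.firstDecodeTimestamp;
  }
  if (dts < track.lastDecodeTimestamp) {
    throw new Error("Timestamps must be monotonically increasing");
  }
  track.lastDecodeTimestamp = dts;
  return { presentationTimestamp: pts, decodeTimestamp: dts };
}

// const track = { lastDecodeTimestamp: -1 };
// normalizeTimestamps(3.5, 3, track, "offset") -> { presentationTimestamp: 0.5, decodeTimestamp: 0 }
```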
|
|
1299
|
+
re = /* @__PURE__ */ new WeakSet();
|
|
1300
|
+
we = function(e) {
|
|
1301
|
+
if (s(this, d).fastStart === "fragmented")
|
|
1302
|
+
throw new Error("Can't finalize individual chunks if 'fastStart' is set to 'fragmented'.");
|
|
1303
|
+
if (e.currentChunk) {
|
|
1304
|
+
if (e.finalizedChunks.push(e.currentChunk), s(this, G).push(e.currentChunk), (e.compactlyCodedChunkTable.length === 0 || me(e.compactlyCodedChunkTable).samplesPerChunk !== e.currentChunk.samples.length) && e.compactlyCodedChunkTable.push({
|
|
1305
|
+
firstChunk: e.finalizedChunks.length,
|
|
1306
|
+
// 1-indexed
|
|
1307
|
+
samplesPerChunk: e.currentChunk.samples.length
|
|
1308
|
+
}), s(this, d).fastStart === "in-memory") {
|
|
1309
|
+
e.currentChunk.offset = 0;
|
|
1310
|
+
return;
|
|
1311
|
+
}
|
|
1312
|
+
e.currentChunk.offset = s(this, u).pos;
|
|
1313
|
+
for (let t of e.currentChunk.samples)
|
|
1314
|
+
s(this, u).write(t.data), t.data = null;
|
|
1315
|
+
m(this, Z, ne).call(this);
|
|
1316
|
+
}
|
|
1317
|
+
};
|
|
1318
|
+
ye = /* @__PURE__ */ new WeakSet();
|
|
1319
|
+
$e = function(e = !0) {
|
|
1320
|
+
if (s(this, d).fastStart !== "fragmented")
|
|
1321
|
+
throw new Error("Can't finalize a fragment unless 'fastStart' is set to 'fragmented'.");
|
|
1322
|
+
let t = [s(this, b), s(this, T)].filter((l) => l && l.currentChunk);
|
|
1323
|
+
if (t.length === 0)
|
|
1324
|
+
return;
|
|
1325
|
+
let i = gt(this, Ne)._++;
|
|
1326
|
+
if (i === 1) {
|
|
1327
|
+
let l = he(t, s(this, H), !0);
|
|
1328
|
+
s(this, u).writeBox(l);
|
|
1329
|
+
}
|
|
1330
|
+
let r = s(this, u).pos, a = qe(i, t);
|
|
1331
|
+
s(this, u).writeBox(a);
|
|
1332
|
+
{
|
|
1333
|
+
let l = be(!1), p = 0;
|
|
1334
|
+
for (let _ of t)
|
|
1335
|
+
for (let W of _.currentChunk.samples)
|
|
1336
|
+
p += W.size;
|
|
1337
|
+
let y = s(this, u).measureBox(l) + p;
|
|
1338
|
+
y >= 2 ** 32 && (l.largeSize = !0, y = s(this, u).measureBox(l) + p), l.size = y, s(this, u).writeBox(l);
|
|
1339
|
+
}
|
|
1340
|
+
for (let l of t) {
|
|
1341
|
+
l.currentChunk.offset = s(this, u).pos, l.currentChunk.moofOffset = r;
|
|
1342
|
+
for (let p of l.currentChunk.samples)
|
|
1343
|
+
s(this, u).write(p.data), p.data = null;
|
|
1344
|
+
}
|
|
1345
|
+
let n = s(this, u).pos;
|
|
1346
|
+
s(this, u).seek(s(this, u).offsets.get(a));
|
|
1347
|
+
let o = qe(i, t);
|
|
1348
|
+
s(this, u).writeBox(o), s(this, u).seek(n);
|
|
1349
|
+
for (let l of t)
|
|
1350
|
+
l.finalizedChunks.push(l.currentChunk), s(this, G).push(l.currentChunk), l.currentChunk = null;
|
|
1351
|
+
e && m(this, Z, ne).call(this);
|
|
1352
|
+
};
|
|
1353
|
+
Z = /* @__PURE__ */ new WeakSet();
|
|
1354
|
+
ne = function() {
|
|
1355
|
+
s(this, u) instanceof We && s(this, u).flush();
|
|
1356
|
+
};
|
|
1357
|
+
fe = /* @__PURE__ */ new WeakSet();
|
|
1358
|
+
Ie = function() {
|
|
1359
|
+
if (s(this, se))
|
|
1360
|
+
throw new Error("Cannot add new video or audio chunks after the file has been finalized.");
|
|
1361
|
+
};
|
|
1362
|
+
const E = 30;
|
|
1363
|
+
class ki {
|
|
1364
|
+
constructor(t) {
|
|
1365
|
+
this.audioContext = null, this.sourceNode = null, this.isPlaying = !1, this.startTime = 0, this.pauseTime = 0, this.duration = t.duration, this.audioConfig = t.audioConfig, this.encodedChunks = [], this.audioSegments = /* @__PURE__ */ new Map(), this.scheduledNodes = /* @__PURE__ */ new Map(), this.preloadThreshold = 5, this.isPreloading = !1, this.worker = t.worker, this.file = t.file, this.init();
|
|
94
1366
|
}
|
|
95
1367
|
init() {
|
|
96
1368
|
this.audioContext = new AudioContext(), this.seek(0);
|
|
@@ -99,89 +1371,89 @@ class I {
|
|
|
99
1371
|
* Mux EncodedAudioChunks to an ArrayBuffer for Web Audio API decoding
|
|
100
1372
|
* @param chunks - Array of EncodedAudioChunks from a segment
|
|
101
1373
|
*/
|
|
102
|
-
async muxEncodedChunksToBuffer(
|
|
103
|
-
const
|
|
104
|
-
target: new
|
|
1374
|
+
async muxEncodedChunksToBuffer(t, i) {
|
|
1375
|
+
const r = new Si({
|
|
1376
|
+
target: new et(),
|
|
105
1377
|
fastStart: "in-memory",
|
|
106
1378
|
firstTimestampBehavior: "offset",
|
|
107
1379
|
audio: {
|
|
108
1380
|
codec: "aac",
|
|
109
|
-
sampleRate:
|
|
110
|
-
numberOfChannels:
|
|
1381
|
+
sampleRate: i.sampleRate,
|
|
1382
|
+
numberOfChannels: i.numberOfChannels
|
|
111
1383
|
}
|
|
112
1384
|
});
|
|
113
|
-
for (const
|
|
114
|
-
|
|
115
|
-
return await
|
|
1385
|
+
for (const a of t)
|
|
1386
|
+
r.addAudioChunk(a);
|
|
1387
|
+
return await r.finalize(), r.target.buffer;
|
|
116
1388
|
}
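The method above wraps a segment's EncodedAudioChunks in an in-memory MP4 so the whole segment can be decoded at once with AudioContext.decodeAudioData(). A hypothetical standalone version of the same flow, assuming the bundled muxer classes correspond to mp4-muxer's Muxer and ArrayBufferTarget (the function name and the import are illustrative):

```js
import { Muxer, ArrayBufferTarget } from "mp4-muxer";

// Wrap already-encoded AAC chunks in an in-memory MP4 so that
// AudioContext.decodeAudioData() can decode the whole segment at once.
function muxChunksToMp4(chunks, { sampleRate, numberOfChannels }) {
  const muxer = new Muxer({
    target: new ArrayBufferTarget(),
    fastStart: "in-memory",           // keep the moov atom in memory; no file seeking needed
    firstTimestampBehavior: "offset", // shift the segment so its first chunk starts at 0
    audio: { codec: "aac", sampleRate, numberOfChannels },
  });
  for (const chunk of chunks) muxer.addAudioChunk(chunk);
  muxer.finalize();
  return muxer.target.buffer; // ArrayBuffer containing the finished MP4
}

// Usage sketch:
// const mp4Buffer = muxChunksToMp4(chunks, { sampleRate: 44100, numberOfChannels: 2 });
// const audioBuffer = await audioContext.decodeAudioData(mp4Buffer);
```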
|
|
117
1389
|
/**
|
|
118
1390
|
* Fetch EncodedAudioChunks for a specific time segment from the file worker
|
|
119
1391
|
* @param time - Time in seconds
|
|
120
1392
|
* @returns Array of EncodedAudioChunks
|
|
121
1393
|
*/
|
|
122
|
-
async getEncodedChunksForTime(
|
|
123
|
-
const
|
|
124
|
-
start:
|
|
125
|
-
end:
|
|
1394
|
+
async getEncodedChunksForTime(t) {
|
|
1395
|
+
const i = Math.floor(t / E), r = await this.worker.sendMessage("get-audio-segment", {
|
|
1396
|
+
start: i * E,
|
|
1397
|
+
end: i * E + E,
|
|
126
1398
|
file: this.file
|
|
127
1399
|
});
|
|
128
|
-
return this.encodedChunks =
|
|
1400
|
+
return this.encodedChunks = r, r;
|
|
129
1401
|
}
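Here E is the audio segment length in seconds (30), so a playback time maps to a segment index and to the start/end window requested from the file worker. A small illustration, with SEGMENT_LENGTH standing in for E:

```js
const SEGMENT_LENGTH = 30; // seconds per audio segment, mirrors `E` above

function segmentWindowFor(time) {
  const index = Math.floor(time / SEGMENT_LENGTH);
  return { index, start: index * SEGMENT_LENGTH, end: (index + 1) * SEGMENT_LENGTH };
}

// segmentWindowFor(71.4) -> { index: 2, start: 60, end: 90 }
```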
|
|
130
1402
|
/**
|
|
131
1403
|
* Load and decode an audio segment
|
|
132
1404
|
* @param time - Time in seconds
|
|
133
1405
|
* @returns Decoded AudioBuffer for the segment
|
|
134
1406
|
*/
|
|
135
|
-
async loadSegment(
|
|
136
|
-
const
|
|
137
|
-
if (this.audioSegments.has(
|
|
138
|
-
return this.audioSegments.get(
|
|
139
|
-
const
|
|
140
|
-
if (
|
|
1407
|
+
async loadSegment(t) {
|
|
1408
|
+
const i = Math.floor(t / E);
|
|
1409
|
+
if (this.audioSegments.has(i))
|
|
1410
|
+
return this.audioSegments.get(i);
|
|
1411
|
+
const r = await this.getEncodedChunksForTime(i * E);
|
|
1412
|
+
if (r.length === 0) return null;
|
|
141
1413
|
try {
|
|
142
|
-
const
|
|
143
|
-
return console.log(`Segment ${
|
|
144
|
-
} catch (
|
|
145
|
-
return console.error("Error loading audio segment:",
|
|
1414
|
+
const a = performance.now(), n = await this.muxEncodedChunksToBuffer(r, this.audioConfig), o = performance.now(), l = await this.audioContext.decodeAudioData(n), p = performance.now();
|
|
1415
|
+
return console.log(`Segment ${i}: Muxing took ${o - a}ms, Decoding took ${p - o}ms`), this.audioSegments.set(i, l), l;
|
|
1416
|
+
} catch (a) {
|
|
1417
|
+
return console.error("Error loading audio segment:", a), null;
|
|
146
1418
|
}
|
|
147
1419
|
}
|
|
148
|
-
async startPlayback(
|
|
1420
|
+
async startPlayback(t = this.pauseTime) {
|
|
149
1421
|
this.clearScheduledNodes();
|
|
150
|
-
const
|
|
151
|
-
if (!
|
|
152
|
-
const
|
|
153
|
-
this.scheduleSegment(
|
|
1422
|
+
const i = await this.loadSegment(t);
|
|
1423
|
+
if (!i) return;
|
|
1424
|
+
const r = t % E, a = E - r;
|
|
1425
|
+
this.scheduleSegment(i, t, r), this.preloadNextSegment(t + a);
|
|
154
1426
|
}
|
|
155
1427
|
clearScheduledNodes() {
|
|
156
|
-
for (const
|
|
157
|
-
|
|
1428
|
+
for (const t of this.scheduledNodes.values())
|
|
1429
|
+
t.stop(), t.disconnect();
|
|
158
1430
|
this.scheduledNodes.clear();
|
|
159
1431
|
}
|
|
160
1432
|
getCurrentSegmentIndex() {
|
|
161
|
-
return Math.floor(this.getCurrentTime() /
|
|
1433
|
+
return Math.floor(this.getCurrentTime() / E);
|
|
162
1434
|
}
|
|
163
|
-
async preloadNextSegment(
|
|
164
|
-
if (this.isPreloading ||
|
|
165
|
-
const
|
|
166
|
-
if (this.audioSegments.has(
|
|
167
|
-
this.scheduleSegment(this.audioSegments.get(
|
|
1435
|
+
async preloadNextSegment(t) {
|
|
1436
|
+
if (this.isPreloading || t >= this.duration) return;
|
|
1437
|
+
const i = Math.floor(t / E);
|
|
1438
|
+
if (this.audioSegments.has(i)) {
|
|
1439
|
+
this.scheduleSegment(this.audioSegments.get(i), t, 0);
|
|
168
1440
|
return;
|
|
169
1441
|
}
|
|
170
1442
|
this.isPreloading = !0;
|
|
171
1443
|
try {
|
|
172
|
-
const
|
|
173
|
-
if (!
|
|
174
|
-
this.scheduleSegment(
|
|
1444
|
+
const r = await this.loadSegment(t);
|
|
1445
|
+
if (!r || !this.isPlaying) return;
|
|
1446
|
+
this.scheduleSegment(r, t, 0);
|
|
175
1447
|
} finally {
|
|
176
1448
|
this.isPreloading = !1;
|
|
177
1449
|
}
|
|
178
1450
|
}
|
|
179
|
-
scheduleSegment(
|
|
180
|
-
const
|
|
181
|
-
|
|
182
|
-
const
|
|
183
|
-
|
|
184
|
-
|
|
1451
|
+
scheduleSegment(t, i, r) {
|
|
1452
|
+
const a = this.audioContext.createBufferSource();
|
|
1453
|
+
a.buffer = t, a.connect(this.audioContext.destination);
|
|
1454
|
+
const n = this.startTime + (i - this.pauseTime);
|
|
1455
|
+
a.start(n, r), this.scheduledNodes.set(i, a), a.onended = () => {
|
|
1456
|
+
a.disconnect(), this.scheduledNodes.delete(i);
|
|
185
1457
|
};
|
|
186
1458
|
}
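scheduleSegment() above anchors each decoded segment on the AudioContext clock: the buffer starts at startTime + (segmentTime - pauseTime), with an intra-segment offset for mid-segment seeks. The scheduling pattern in isolation (names are illustrative):

```js
// Schedule a decoded AudioBuffer so that `offsetIntoBuffer` seconds into it
// plays exactly when the context clock reaches `when`.
function scheduleBuffer(audioContext, audioBuffer, when, offsetIntoBuffer) {
  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(audioContext.destination);
  source.start(when, offsetIntoBuffer); // start(when, offset), both in seconds
  source.onended = () => source.disconnect();
  return source;
}

// Usage sketch: resume a 30 s segment 12 s in, starting right now.
// scheduleBuffer(ctx, segmentBuffer, ctx.currentTime, 12);
```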
|
|
187
1459
|
async play() {
|
|
@@ -190,56 +1462,56 @@ class I {
|
|
|
190
1462
|
async pause() {
|
|
191
1463
|
this.clearScheduledNodes(), this.pauseTime = this.getCurrentTime(), this.isPlaying = !1;
|
|
192
1464
|
}
|
|
193
|
-
async seek(
|
|
194
|
-
const
|
|
195
|
-
|
|
1465
|
+
async seek(t) {
|
|
1466
|
+
const i = this.isPlaying;
|
|
1467
|
+
i && (this.clearScheduledNodes(), this.isPlaying = !1), this.pauseTime = t, i && (this.startTime = this.audioContext.currentTime, this.isPlaying = !0, await this.startPlayback(t));
|
|
196
1468
|
}
|
|
197
1469
|
checkForPreLoad() {
|
|
198
1470
|
if (!this.isPlaying) return;
|
|
199
|
-
const
|
|
200
|
-
|
|
1471
|
+
const t = this.getCurrentTime(), i = this.getCurrentSegmentIndex();
|
|
1472
|
+
t % E >= E - this.preloadThreshold && !this.isPreloading && !this.audioSegments.has(i + 1) && this.preloadNextSegment((i + 1) * E);
|
|
201
1473
|
}
|
|
202
1474
|
getCurrentTime() {
|
|
203
1475
|
return this.isPlaying ? this.pauseTime + (this.audioContext.currentTime - this.startTime) : this.pauseTime;
|
|
204
1476
|
}
|
|
205
1477
|
}
|
|
206
|
-
class
|
|
1478
|
+
class ut {
|
|
207
1479
|
constructor() {
|
|
208
1480
|
this.listeners = {};
|
|
209
1481
|
}
|
|
210
1482
|
/**
|
|
211
1483
|
* Add an event listener and return its unique ID
|
|
212
1484
|
*/
|
|
213
|
-
on(
|
|
214
|
-
this.listeners[
|
|
215
|
-
const
|
|
216
|
-
return this.listeners[
|
|
1485
|
+
on(t, i) {
|
|
1486
|
+
this.listeners[t] || (this.listeners[t] = {});
|
|
1487
|
+
const r = Xe();
|
|
1488
|
+
return this.listeners[t][r] = i, r;
|
|
217
1489
|
}
|
|
218
1490
|
/**
|
|
219
1491
|
* Remove a specific listener by its ID
|
|
220
1492
|
*/
|
|
221
|
-
off(
|
|
222
|
-
this.listeners[
|
|
1493
|
+
off(t, i) {
|
|
1494
|
+
this.listeners[t] && delete this.listeners[t][i];
|
|
223
1495
|
}
|
|
224
1496
|
/**
|
|
225
1497
|
* Emit an event to all registered listeners
|
|
226
1498
|
*/
|
|
227
|
-
emit(
|
|
228
|
-
this.listeners[
|
|
229
|
-
i
|
|
1499
|
+
emit(t, i) {
|
|
1500
|
+
this.listeners[t] && Object.values(this.listeners[t]).forEach((r) => {
|
|
1501
|
+
r(i);
|
|
230
1502
|
});
|
|
231
1503
|
}
|
|
232
1504
|
/**
|
|
233
1505
|
* Remove all listeners for a specific event
|
|
234
1506
|
*/
|
|
235
|
-
removeAllListeners(
|
|
236
|
-
this.listeners[
|
|
1507
|
+
removeAllListeners(t) {
|
|
1508
|
+
this.listeners[t] && (this.listeners[t] = {});
|
|
237
1509
|
}
|
|
238
1510
|
}
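This emitter keys every listener under a generated UUID so a single callback can be removed later without touching the others. A minimal, self-contained re-creation of the pattern for illustration (the class name is a stand-in; the bundle keeps its emitter internal and only extends it):

```js
class Emitter {
  constructor() { this.listeners = {}; }
  on(event, callback) {
    this.listeners[event] ??= {};
    const id = crypto.randomUUID();   // same idea as the bundled uuid helper
    this.listeners[event][id] = callback;
    return id;                        // handle for targeted removal
  }
  off(event, id) { if (this.listeners[event]) delete this.listeners[event][id]; }
  emit(event, payload) {
    Object.values(this.listeners[event] ?? {}).forEach((cb) => cb(payload));
  }
}

const emitter = new Emitter();
const id = emitter.on("tick", (t) => console.log("tick at", t));
emitter.emit("tick", 1.5); // logs "tick at 1.5"
emitter.off("tick", id);   // only this listener is removed
```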
|
|
239
|
-
const
|
|
240
|
-
class
|
|
241
|
-
constructor(
|
|
242
|
-
super(), this.offscreenCanvas = null, this.duration = 0, this.canvas =
|
|
1511
|
+
const bi = "/assets/video.worker-DrxbiAg9.js";
|
|
1512
|
+
class He extends ut {
|
|
1513
|
+
constructor(t) {
|
|
1514
|
+
super(), this.offscreenCanvas = null, this.duration = 0, this.canvas = t.canvas, this.fileWorkerPort = t.fileWorkerPort, this.worker = new Se(bi);
|
|
243
1515
|
}
|
|
244
1516
|
/**
|
|
245
1517
|
* Send a message to the worker and wait for a response
|
|
@@ -249,17 +1521,17 @@ class C extends x {
|
|
|
249
1521
|
*/
|
|
250
1522
|
async initialize() {
|
|
251
1523
|
this.offscreenCanvas = this.canvas.transferControlToOffscreen();
|
|
252
|
-
const
|
|
1524
|
+
const t = await this.worker.sendMessage("init", {
|
|
253
1525
|
canvas: this.offscreenCanvas,
|
|
254
1526
|
fileWorkerPort: this.fileWorkerPort
|
|
255
1527
|
}, [this.offscreenCanvas, this.fileWorkerPort]);
|
|
256
|
-
this.emit("initialized",
|
|
1528
|
+
this.emit("initialized", t);
|
|
257
1529
|
}
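initialize() above transfers control of the page canvas, plus a MessagePort to the file worker, into the video worker. The bundle routes this through its request/response wrapper; with the raw browser APIs the handoff looks roughly like this (worker URL and message shape are illustrative):

```js
// Main thread: give a worker exclusive control of a canvas plus a port
// for talking to another worker.
const canvas = document.querySelector("canvas");
const offscreen = canvas.transferControlToOffscreen();

const { port1, port2 } = new MessageChannel();
const videoWorker = new Worker("/assets/video.worker.js", { type: "module" });

// Both the OffscreenCanvas and the MessagePort must be listed as transferables.
videoWorker.postMessage(
  { cmd: "init", canvas: offscreen, fileWorkerPort: port1 },
  [offscreen, port1]
);
// port2 would be transferred to the file worker in the same way.

// Inside the video worker (sketch):
// self.onmessage = ({ data }) => {
//   const ctx = data.canvas.getContext("2d");
//   data.fileWorkerPort.onmessage = (e) => { /* samples/frames from the file worker */ };
// };
```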
|
|
258
1530
|
/**
|
|
259
1531
|
* Seek to a specific time
|
|
260
1532
|
*/
|
|
261
|
-
async seek(
|
|
262
|
-
await this.worker.sendMessage("seek", { time:
|
|
1533
|
+
async seek(t) {
|
|
1534
|
+
await this.worker.sendMessage("seek", { time: t });
|
|
263
1535
|
}
|
|
264
1536
|
/**
|
|
265
1537
|
* Get debug information from the video worker
|
|
@@ -267,10 +1539,10 @@ class C extends x {
|
|
|
267
1539
|
async getDebugInfo() {
|
|
268
1540
|
return await this.worker.sendMessage("get-debug-info", {});
|
|
269
1541
|
}
|
|
270
|
-
async setTrackData(
|
|
1542
|
+
async setTrackData(t, i) {
|
|
271
1543
|
await this.worker.sendMessage("set-track-data", {
|
|
272
|
-
videoMetadata:
|
|
273
|
-
duration:
|
|
1544
|
+
videoMetadata: t,
|
|
1545
|
+
duration: i
|
|
274
1546
|
});
|
|
275
1547
|
}
|
|
276
1548
|
/**
|
|
@@ -284,19 +1556,19 @@ class C extends x {
|
|
|
284
1556
|
/**
|
|
285
1557
|
* Update the current frame (animation loop)
|
|
286
1558
|
*/
|
|
287
|
-
render(
|
|
288
|
-
this.worker.sendMessage("render", { time:
|
|
1559
|
+
render(t) {
|
|
1560
|
+
this.worker.sendMessage("render", { time: t });
|
|
289
1561
|
}
|
|
290
1562
|
}
|
|
291
|
-
class
|
|
1563
|
+
class _i extends ut {
|
|
292
1564
|
/**
|
|
293
1565
|
* Create a new Clock
|
|
294
1566
|
* @param audioPlayer - Audio player with Web Audio timeline
|
|
295
1567
|
* @param videoWorker - Video worker for passive rendering
|
|
296
1568
|
* @param duration - Total video duration in seconds
|
|
297
1569
|
*/
|
|
298
|
-
constructor(
|
|
299
|
-
super(), this.isPlaying = !1, this.animationFrame = null, this.TARGET_FPS = 30, this.lastFrameTime = 0, this.audioPlayer =
|
|
1570
|
+
constructor(t, i, r) {
|
|
1571
|
+
super(), this.isPlaying = !1, this.animationFrame = null, this.TARGET_FPS = 30, this.lastFrameTime = 0, this.audioPlayer = t, this.videoWorker = i, this.duration = r, this.FRAME_INTERVAL = 1e3 / this.TARGET_FPS;
|
|
300
1572
|
}
|
|
301
1573
|
/**
|
|
302
1574
|
* Start playback
|
|
@@ -320,9 +1592,9 @@ class b extends x {
|
|
|
320
1592
|
*
|
|
321
1593
|
* @param time - Time in seconds
|
|
322
1594
|
*/
|
|
323
|
-
async seek(
|
|
324
|
-
const
|
|
325
|
-
this.videoWorker.seek(
|
|
1595
|
+
async seek(t) {
|
|
1596
|
+
const i = Math.max(0, Math.min(t, this.duration));
|
|
1597
|
+
this.videoWorker.seek(i), await this.audioPlayer.seek(i), this.emit("seek", i);
|
|
326
1598
|
}
|
|
327
1599
|
/**
|
|
328
1600
|
* Get the current playback time
|
|
@@ -354,24 +1626,24 @@ class b extends x {
|
|
|
354
1626
|
*/
|
|
355
1627
|
tick() {
|
|
356
1628
|
if (!this.isPlaying) return;
|
|
357
|
-
const
|
|
358
|
-
if (
|
|
1629
|
+
const t = performance.now();
|
|
1630
|
+
if (t - this.lastFrameTime < this.FRAME_INTERVAL) {
|
|
359
1631
|
this.animationFrame = requestAnimationFrame(() => this.tick());
|
|
360
1632
|
return;
|
|
361
1633
|
}
|
|
362
|
-
this.lastFrameTime =
|
|
363
|
-
const
|
|
364
|
-
if (
|
|
1634
|
+
this.lastFrameTime = t;
|
|
1635
|
+
const r = this.audioPlayer.getCurrentTime();
|
|
1636
|
+
if (r >= this.duration - 0.1) {
|
|
365
1637
|
this.pause(), this.emit("ended");
|
|
366
1638
|
return;
|
|
367
1639
|
}
|
|
368
|
-
this.emit("tick",
|
|
1640
|
+
this.emit("tick", r), this.videoWorker.render(r), this.audioPlayer.checkForPreLoad(), this.animationFrame = requestAnimationFrame(() => this.tick());
|
|
369
1641
|
}
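The tick loop above throttles requestAnimationFrame to TARGET_FPS (30) by skipping frames until FRAME_INTERVAL milliseconds have passed, and reads the current time from the audio clock rather than wall time. The throttling part in isolation (names are illustrative):

```js
const TARGET_FPS = 30;
const FRAME_INTERVAL = 1000 / TARGET_FPS; // ~33.3 ms between rendered frames

let lastFrameTime = 0;

function tick(render) {
  const now = performance.now();
  if (now - lastFrameTime < FRAME_INTERVAL) {
    // Too soon: skip the work but keep the loop alive.
    requestAnimationFrame(() => tick(render));
    return;
  }
  lastFrameTime = now;
  render(now);
  requestAnimationFrame(() => tick(render));
}

// tick((now) => console.log("render at", now));
```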
|
|
370
1642
|
/**
|
|
371
1643
|
* Update duration (if needed after initialization)
|
|
372
1644
|
*/
|
|
373
|
-
setDuration(
|
|
374
|
-
this.duration =
|
|
1645
|
+
setDuration(t) {
|
|
1646
|
+
this.duration = t;
|
|
375
1647
|
}
|
|
376
1648
|
/**
|
|
377
1649
|
* Clean up resources
|
|
@@ -380,10 +1652,10 @@ class b extends x {
|
|
|
380
1652
|
this.animationFrame && (cancelAnimationFrame(this.animationFrame), this.animationFrame = null), this.isPlaying = !1;
|
|
381
1653
|
}
|
|
382
1654
|
}
|
|
383
|
-
const
|
|
384
|
-
class
|
|
385
|
-
constructor(
|
|
386
|
-
this.canvas = null, this.duration = 0, this.renderer = null, this.audioPlayer = null, this.worker = null, this.clock = null, this.trackData = null, this.params =
|
|
1655
|
+
const Ge = "/assets/file-DCgsDVsh.js";
|
|
1656
|
+
class Ei {
|
|
1657
|
+
constructor(t) {
|
|
1658
|
+
this.canvas = null, this.duration = 0, this.renderer = null, this.audioPlayer = null, this.worker = null, this.clock = null, this.trackData = null, this.params = t, this.worker = new Se(Ge), this.file = t.src, this.canvas = t.canvas, this.duration = 0;
|
|
387
1659
|
}
|
|
388
1660
|
async play() {
|
|
389
1661
|
if (!this.clock)
|
|
@@ -395,74 +1667,74 @@ class z {
|
|
|
395
1667
|
throw new Error("Player not initialized. Call initialize() first.");
|
|
396
1668
|
this.clock.pause();
|
|
397
1669
|
}
|
|
398
|
-
async seek(
|
|
1670
|
+
async seek(t) {
|
|
399
1671
|
if (!this.clock)
|
|
400
1672
|
throw new Error("Player not initialized. Call initialize() first.");
|
|
401
|
-
await this.clock.seek(
|
|
1673
|
+
await this.clock.seek(t);
|
|
402
1674
|
}
|
|
403
1675
|
getCurrentTime() {
|
|
404
|
-
var
|
|
405
|
-
return ((
|
|
1676
|
+
var t;
|
|
1677
|
+
return ((t = this.clock) == null ? void 0 : t.getCurrentTime()) || 0;
|
|
406
1678
|
}
|
|
407
1679
|
async getDebugInfo() {
|
|
408
|
-
var
|
|
409
|
-
const
|
|
1680
|
+
var i, r, a, n, o, l, p, y, _, W, K, Q, P;
|
|
1681
|
+
const t = this.renderer ? await this.renderer.getDebugInfo() : null;
|
|
410
1682
|
return {
|
|
411
1683
|
trackData: {
|
|
412
1684
|
duration: this.duration,
|
|
413
1685
|
audio: this.audioPlayer ? {
|
|
414
|
-
codec: (
|
|
415
|
-
sampleRate: (
|
|
416
|
-
numberOfChannels: (
|
|
1686
|
+
codec: (i = this.audioPlayer.audioConfig) == null ? void 0 : i.codec,
|
|
1687
|
+
sampleRate: (r = this.audioPlayer.audioConfig) == null ? void 0 : r.sampleRate,
|
|
1688
|
+
numberOfChannels: (a = this.audioPlayer.audioConfig) == null ? void 0 : a.numberOfChannels,
|
|
417
1689
|
startTime: this.audioPlayer.startTime,
|
|
418
1690
|
pauseTime: this.audioPlayer.pauseTime,
|
|
419
1691
|
isPlaying: this.audioPlayer.isPlaying,
|
|
420
1692
|
loadedSegments: this.audioPlayer.audioSegments.size,
|
|
421
1693
|
scheduledNodeCount: this.audioPlayer.scheduledNodes.size
|
|
422
1694
|
} : null,
|
|
423
|
-
video:
|
|
424
|
-
duration: (
|
|
425
|
-
codec: (
|
|
426
|
-
width: (
|
|
427
|
-
height: (
|
|
428
|
-
frameRate: (
|
|
429
|
-
...
|
|
1695
|
+
video: t ? {
|
|
1696
|
+
duration: (n = this.renderer) == null ? void 0 : n.duration,
|
|
1697
|
+
codec: (l = (o = this.trackData) == null ? void 0 : o.video) == null ? void 0 : l.codec,
|
|
1698
|
+
width: (y = (p = this.trackData) == null ? void 0 : p.video) == null ? void 0 : y.codedWidth,
|
|
1699
|
+
height: (W = (_ = this.trackData) == null ? void 0 : _.video) == null ? void 0 : W.codedHeight,
|
|
1700
|
+
frameRate: (Q = (K = this.trackData) == null ? void 0 : K.video) == null ? void 0 : Q.frameRate,
|
|
1701
|
+
...t
|
|
430
1702
|
} : null
|
|
431
1703
|
},
|
|
432
1704
|
clock: {
|
|
433
|
-
isPlaying: (
|
|
1705
|
+
isPlaying: (P = this.clock) == null ? void 0 : P.playing(),
|
|
434
1706
|
currentTime: this.getCurrentTime()
|
|
435
1707
|
}
|
|
436
1708
|
};
|
|
437
1709
|
}
|
|
438
1710
|
terminate() {
|
|
439
|
-
this.clock && (this.clock.destroy(), this.clock = null), this.audioPlayer && this.audioPlayer.pause(), this.renderer && (this.renderer instanceof
|
|
1711
|
+
this.clock && (this.clock.destroy(), this.clock = null), this.audioPlayer && this.audioPlayer.pause(), this.renderer && (this.renderer instanceof He && this.renderer.terminate(), this.renderer = null);
|
|
440
1712
|
}
|
|
441
1713
|
async initialize() {
|
|
442
|
-
console.log("Initializing"), this.worker = new
|
|
443
|
-
const
|
|
1714
|
+
console.log("Initializing"), this.worker = new Se(Ge);
|
|
1715
|
+
const t = new MessageChannel();
|
|
444
1716
|
await this.worker.sendMessage("init", {
|
|
445
1717
|
file: this.file,
|
|
446
|
-
videoPort:
|
|
447
|
-
}, [
|
|
448
|
-
const
|
|
449
|
-
console.log("Track data",
|
|
1718
|
+
videoPort: t.port1
|
|
1719
|
+
}, [t.port1]);
|
|
1720
|
+
const i = await this.worker.sendMessage("get-tracks", {});
|
|
1721
|
+
console.log("Track data", i), this.trackData = i, this.duration = i.duration, this.renderer = new He({
|
|
450
1722
|
src: this.file,
|
|
451
1723
|
canvas: this.canvas,
|
|
452
|
-
fileWorkerPort:
|
|
453
|
-
}), await this.renderer.initialize(), await this.renderer.setTrackData(
|
|
1724
|
+
fileWorkerPort: t.port2
|
|
1725
|
+
}), await this.renderer.initialize(), await this.renderer.setTrackData(i.video, i.duration), this.audioPlayer = new ki({
|
|
454
1726
|
worker: this.worker,
|
|
455
|
-
audioConfig:
|
|
456
|
-
duration:
|
|
1727
|
+
audioConfig: i.audio,
|
|
1728
|
+
duration: i.duration,
|
|
457
1729
|
file: this.file
|
|
458
|
-
}), this.clock = new
|
|
1730
|
+
}), this.clock = new _i(this.audioPlayer, this.renderer, this.duration);
|
|
459
1731
|
}
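initialize() wires the pieces together: a file worker parses the source, a MessageChannel links it directly to the video worker, and the returned track data configures the renderer, audio player, and clock. A hypothetical usage sketch of the exported WebCodecsPlayer, built only from the constructor and methods visible in this diff (the exact type of src and the updateScrubber callback are assumptions):

```js
import { WebCodecsPlayer } from "webcodecs-examples";

const player = new WebCodecsPlayer({
  src: file,                                // e.g. a File picked by the user (assumed shape)
  canvas: document.querySelector("canvas"), // rendering surface handed to the video worker
});

await player.initialize();                  // spawn workers, fetch track data, build the clock
player.on("tick", (time) => updateScrubber(time)); // clock emits "tick", "seek", "ended"
player.on("ended", () => console.log("done"));

await player.play();
await player.seek(42);                      // seconds
console.log(await player.getDebugInfo());
player.terminate();                         // tear down clock, audio player, and renderer
```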
|
|
460
|
-
on(
|
|
461
|
-
this.clock.on(
|
|
1732
|
+
on(t, i) {
|
|
1733
|
+
this.clock.on(t, i);
|
|
462
1734
|
}
|
|
463
1735
|
// Add more methods as needed
|
|
464
1736
|
}
|
|
465
1737
|
export {
|
|
466
|
-
|
|
1738
|
+
Ei as WebCodecsPlayer
|
|
467
1739
|
};
|
|
468
1740
|
//# sourceMappingURL=index.js.map
|