@it-compiles/anima 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,2420 @@
1
+ import { A as AnimCtxImpl } from "./player-MRRNy8I9.js";
2
+ import { S, V, b, a, i, l, m, v } from "./player-MRRNy8I9.js";
3
+ const linear = (u) => {
4
+ return u;
5
+ };
6
+ const easeIn = (u) => {
7
+ return u * u;
8
+ };
9
+ const easeOut = (u) => {
10
+ return u * (2 - u);
11
+ };
12
+ const easeInOut = (u) => {
13
+ return u < 0.5 ? 2 * u * u : -1 + (4 - 2 * u) * u;
14
+ };
15
+ const easeCirc = (u) => {
16
+ return 1 - Math.sqrt(1 - u * u);
17
+ };
18
+ const easeBack = (u) => {
19
+ const s = 1.70158;
20
+ return u * u * ((s + 1) * u - s);
21
+ };
22
+ const easeBounce = (u) => {
23
+ const n1 = 7.5625;
24
+ const d1 = 2.75;
25
+ if (u < 1 / d1) {
26
+ return n1 * u * u;
27
+ } else if (u < 2 / d1) {
28
+ return n1 * (u -= 1.5 / d1) * u + 0.75;
29
+ } else if (u < 2.5 / d1) {
30
+ return n1 * (u -= 2.25 / d1) * u + 0.9375;
31
+ } else {
32
+ return n1 * (u -= 2.625 / d1) * u + 0.984375;
33
+ }
34
+ };
35
+ const easeElastic = (u) => {
36
+ const c4 = 2 * Math.PI / 3;
37
+ return u === 0 ? 0 : u === 1 ? 1 : Math.pow(2, -10 * u) * Math.sin((u * 10 - 0.75) * c4) + 1;
38
+ };
39
+ const overShoot = (u) => {
40
+ const s = 1.70158 * 1.525;
41
+ if (u < 0.5) {
42
+ return Math.pow(2 * u, 2) * ((s + 1) * 2 * u - s) / 2;
43
+ } else {
44
+ return (Math.pow(2 * u - 2, 2) * ((s + 1) * (u * 2 - 2) + s) + 2) / 2;
45
+ }
46
+ };
47
+ function makeScene() {
48
+ const nodes = /* @__PURE__ */ new Map();
49
+ function text(content, opts = {}) {
50
+ const obj = { id: /* @__PURE__ */ Symbol("text") };
51
+ const font = opts.font ?? "16px sans-serif";
52
+ const size = opts.size ?? 16;
53
+ const canvas = document.createElement("canvas");
54
+ const g = canvas.getContext("2d");
55
+ g.font = font;
56
+ const metrics = g.measureText(content);
57
+ const w = metrics.width;
58
+ const h = size;
59
+ const baseline = size * 0.8;
60
+ const bounds = { x: 0, y: 0, w, h };
61
+ const initial = {
62
+ translate: opts.at ?? [0, 0],
63
+ rotate: 0,
64
+ scale: 1
65
+ };
66
+ nodes.set(obj, {
67
+ kind: "text",
68
+ bounds,
69
+ baseline,
70
+ initial,
71
+ draw(g2, t, opacity) {
72
+ g2.save();
73
+ g2.globalAlpha = opacity;
74
+ g2.translate(t.translate[0], t.translate[1]);
75
+ g2.rotate(t.rotate);
76
+ const sx = (t.scaleX ?? 1) * t.scale;
77
+ const sy = (t.scaleY ?? 1) * t.scale;
78
+ g2.scale(sx, sy);
79
+ g2.font = font;
80
+ g2.fillStyle = opts.color ?? "black";
81
+ g2.fillText(content, 0, baseline);
82
+ g2.restore();
83
+ }
84
+ });
85
+ return obj;
86
+ }
87
+ function rect(opts) {
88
+ const obj = { id: /* @__PURE__ */ Symbol("rect") };
89
+ const { width, height, fill, stroke, strokeWidth = 1, cornerRadius = 0 } = opts;
90
+ const bounds = { x: -width / 2, y: -height / 2, w: width, h: height };
91
+ const initial = {
92
+ translate: opts.at ?? [0, 0],
93
+ rotate: 0,
94
+ scale: 1
95
+ };
96
+ nodes.set(obj, {
97
+ kind: "rect",
98
+ bounds,
99
+ initial,
100
+ draw(g, t, opacity) {
101
+ g.save();
102
+ g.globalAlpha = opacity;
103
+ g.translate(t.translate[0], t.translate[1]);
104
+ g.rotate(t.rotate);
105
+ const sx = (t.scaleX ?? 1) * t.scale;
106
+ const sy = (t.scaleY ?? 1) * t.scale;
107
+ g.scale(sx, sy);
108
+ const avgScale = (sx + sy) / 2;
109
+ const adjustedStrokeWidth = strokeWidth / avgScale;
110
+ const x = -width / 2;
111
+ const y = -height / 2;
112
+ if (cornerRadius > 0) {
113
+ g.beginPath();
114
+ g.roundRect(x, y, width, height, cornerRadius);
115
+ if (fill) {
116
+ g.fillStyle = fill;
117
+ g.fill();
118
+ }
119
+ if (stroke) {
120
+ g.strokeStyle = stroke;
121
+ g.lineWidth = adjustedStrokeWidth;
122
+ g.stroke();
123
+ }
124
+ } else {
125
+ if (fill) {
126
+ g.fillStyle = fill;
127
+ g.fillRect(x, y, width, height);
128
+ }
129
+ if (stroke) {
130
+ g.strokeStyle = stroke;
131
+ g.lineWidth = adjustedStrokeWidth;
132
+ g.strokeRect(x, y, width, height);
133
+ }
134
+ }
135
+ g.restore();
136
+ }
137
+ });
138
+ return obj;
139
+ }
140
+ function resolveAnchor(obj, anchor) {
141
+ const node = nodes.get(obj);
142
+ const { bounds, baseline = 0 } = node;
143
+ const left = bounds.x;
144
+ const right = bounds.x + bounds.w;
145
+ const top = bounds.y;
146
+ const bottom = bounds.y + bounds.h;
147
+ const centerX = bounds.x + bounds.w / 2;
148
+ const centerY = bounds.y + bounds.h / 2;
149
+ switch (anchor) {
150
+ case "origin":
151
+ return [0, 0];
152
+ // Transform origin is always at (0, 0)
153
+ case "topLeft":
154
+ return [left, top];
155
+ case "top":
156
+ return [centerX, top];
157
+ case "topRight":
158
+ return [right, top];
159
+ case "left":
160
+ return [left, centerY];
161
+ case "center":
162
+ return [centerX, centerY];
163
+ case "right":
164
+ return [right, centerY];
165
+ case "bottomLeft":
166
+ return [left, bottom];
167
+ case "bottom":
168
+ return [centerX, bottom];
169
+ case "bottomRight":
170
+ return [right, bottom];
171
+ case "baselineLeft":
172
+ return [left, baseline];
173
+ case "baseline":
174
+ return [centerX, baseline];
175
+ }
176
+ }
177
+ function render(ctx, canvas, dpr) {
178
+ const g = canvas.getContext("2d");
179
+ const scale = dpr ?? window.devicePixelRatio ?? 1;
180
+ g.clearRect(0, 0, canvas.width, canvas.height);
181
+ g.save();
182
+ g.scale(scale, scale);
183
+ for (const [obj, node] of nodes) {
184
+ const t = ctx.getTransform(obj);
185
+ if (!t || !t.translate) continue;
186
+ const opacity = ctx.getOpacity(obj);
187
+ node.draw(g, t, opacity);
188
+ }
189
+ g.restore();
190
+ }
191
+ function bg(color) {
192
+ return rect({
193
+ at: [1280 / 2, 720 / 2],
194
+ // Screen.center()
195
+ width: 1280,
196
+ height: 720,
197
+ fill: color
198
+ });
199
+ }
200
+ function image(srcInput, opts = { at: [0, 0] }) {
201
+ const obj = { id: /* @__PURE__ */ Symbol("image") };
202
+ let loaded = false;
203
+ let drawable = null;
204
+ let naturalWidth = opts?.width ?? 100;
205
+ let naturalHeight = opts?.height ?? 100;
206
+ const updateBounds = (w, h) => {
207
+ const node = nodes.get(obj);
208
+ if (node) {
209
+ const width2 = opts?.width ?? w;
210
+ const height2 = opts?.height ?? h;
211
+ node.bounds = { x: -width2 / 2, y: -height2 / 2, w: width2, h: height2 };
212
+ }
213
+ };
214
+ if (typeof srcInput === "string") {
215
+ const img = new Image();
216
+ img.onload = () => {
217
+ loaded = true;
218
+ drawable = img;
219
+ naturalWidth = img.naturalWidth;
220
+ naturalHeight = img.naturalHeight;
221
+ updateBounds(naturalWidth, naturalHeight);
222
+ };
223
+ img.onerror = () => {
224
+ console.error(`Failed to load image: ${srcInput}`);
225
+ };
226
+ img.src = srcInput;
227
+ } else if (srcInput instanceof HTMLImageElement) {
228
+ drawable = srcInput;
229
+ if (srcInput.complete && srcInput.naturalWidth > 0) {
230
+ loaded = true;
231
+ naturalWidth = srcInput.naturalWidth;
232
+ naturalHeight = srcInput.naturalHeight;
233
+ } else {
234
+ srcInput.onload = () => {
235
+ loaded = true;
236
+ naturalWidth = srcInput.naturalWidth;
237
+ naturalHeight = srcInput.naturalHeight;
238
+ updateBounds(naturalWidth, naturalHeight);
239
+ };
240
+ }
241
+ } else if (srcInput instanceof ImageBitmap) {
242
+ loaded = true;
243
+ drawable = srcInput;
244
+ naturalWidth = srcInput.width;
245
+ naturalHeight = srcInput.height;
246
+ } else if (srcInput instanceof HTMLCanvasElement) {
247
+ loaded = true;
248
+ drawable = srcInput;
249
+ naturalWidth = srcInput.width;
250
+ naturalHeight = srcInput.height;
251
+ } else if (typeof srcInput === "object" && "kind" in srcInput) {
252
+ const src = srcInput;
253
+ if (src.kind === "url") {
254
+ const img = new Image();
255
+ img.onload = () => {
256
+ loaded = true;
257
+ drawable = img;
258
+ naturalWidth = img.naturalWidth;
259
+ naturalHeight = img.naturalHeight;
260
+ updateBounds(naturalWidth, naturalHeight);
261
+ };
262
+ img.onerror = () => {
263
+ console.error(`Failed to load image: ${src.src}`);
264
+ };
265
+ img.src = src.src;
266
+ } else if (src.kind === "image") {
267
+ drawable = src.image;
268
+ if (src.image.complete && src.image.naturalWidth > 0) {
269
+ loaded = true;
270
+ naturalWidth = src.image.naturalWidth;
271
+ naturalHeight = src.image.naturalHeight;
272
+ } else {
273
+ src.image.onload = () => {
274
+ loaded = true;
275
+ naturalWidth = src.image.naturalWidth;
276
+ naturalHeight = src.image.naturalHeight;
277
+ updateBounds(naturalWidth, naturalHeight);
278
+ };
279
+ }
280
+ } else if (src.kind === "bitmap") {
281
+ loaded = true;
282
+ drawable = src.bitmap;
283
+ naturalWidth = src.bitmap.width;
284
+ naturalHeight = src.bitmap.height;
285
+ } else if (src.kind === "canvas") {
286
+ loaded = true;
287
+ drawable = src.canvas;
288
+ naturalWidth = src.canvas.width;
289
+ naturalHeight = src.canvas.height;
290
+ }
291
+ }
292
+ const width = opts?.width ?? naturalWidth;
293
+ const height = opts?.height ?? naturalHeight;
294
+ const bounds = { x: -width / 2, y: -height / 2, w: width, h: height };
295
+ const initial = {
296
+ translate: opts?.at ?? [0, 0],
297
+ rotate: 0,
298
+ scale: 1
299
+ };
300
+ nodes.set(obj, {
301
+ kind: "image",
302
+ bounds,
303
+ initial,
304
+ draw(g, t, opacity) {
305
+ g.save();
306
+ g.globalAlpha = opacity;
307
+ g.translate(t.translate[0], t.translate[1]);
308
+ g.rotate(t.rotate);
309
+ const sx = (t.scaleX ?? 1) * t.scale;
310
+ const sy = (t.scaleY ?? 1) * t.scale;
311
+ g.scale(sx, sy);
312
+ const node = nodes.get(obj);
313
+ const drawWidth = node.bounds.w;
314
+ const drawHeight = node.bounds.h;
315
+ const x = -drawWidth / 2;
316
+ const y = -drawHeight / 2;
317
+ if (loaded && drawable) {
318
+ g.drawImage(drawable, x, y, drawWidth, drawHeight);
319
+ } else {
320
+ g.fillStyle = "#e0e0e0";
321
+ g.fillRect(x, y, drawWidth, drawHeight);
322
+ g.strokeStyle = "#c0c0c0";
323
+ g.lineWidth = 1;
324
+ g.beginPath();
325
+ const step = 20;
326
+ for (let i2 = -drawHeight; i2 < drawWidth; i2 += step) {
327
+ g.moveTo(x + i2, y);
328
+ g.lineTo(x + i2 + drawHeight, y + drawHeight);
329
+ }
330
+ g.stroke();
331
+ }
332
+ g.restore();
333
+ }
334
+ });
335
+ return obj;
336
+ }
337
+ return {
338
+ text,
339
+ rect,
340
+ bg,
341
+ image,
342
+ objects: () => nodes.keys(),
343
+ resolveAnchor,
344
+ getInitialTransform: (obj) => nodes.get(obj).initial,
345
+ render,
346
+ getBounds: (obj) => {
347
+ const node = nodes.get(obj);
348
+ if (!node) throw new Error("Object not found in scene");
349
+ return node.bounds;
350
+ },
351
+ getBaseline: (obj) => {
352
+ const node = nodes.get(obj);
353
+ if (!node) return null;
354
+ return node.baseline ?? null;
355
+ }
356
+ };
357
+ }
358
+ function timeline(build) {
359
+ const segs = [];
360
+ const marks = /* @__PURE__ */ Object.create(null);
361
+ let now = 0;
362
+ const api = {
363
+ seq(fn) {
364
+ fn();
365
+ },
366
+ mark(name) {
367
+ marks[name] = now;
368
+ },
369
+ tween(dur, ease, body) {
370
+ const start = now, end = start + dur;
371
+ segs.push({ start, end, ease: ease ?? ((u) => u), body });
372
+ now = end;
373
+ },
374
+ instant(body) {
375
+ const t = now;
376
+ segs.push({ start: t, end: t, ease: (u) => u, body: (ctx, _u) => body(ctx) });
377
+ },
378
+ wait(dur) {
379
+ const start = now, end = start + dur;
380
+ segs.push({ start, end, ease: (u) => u, body: () => {
381
+ } });
382
+ now = end;
383
+ },
384
+ with(name, offsetMs, fn) {
385
+ const saved = now;
386
+ now = (marks[name] ?? 0) + offsetMs;
387
+ fn();
388
+ now = saved;
389
+ },
390
+ overlap(offsetMs, fn) {
391
+ const saved = now;
392
+ now = now + offsetMs;
393
+ fn();
394
+ now = saved;
395
+ },
396
+ par(fn) {
397
+ const start = now;
398
+ let maxEnd = start;
399
+ const saved = now;
400
+ now = start;
401
+ fn();
402
+ maxEnd = Math.max(maxEnd, now);
403
+ now = saved;
404
+ now = maxEnd;
405
+ }
406
+ };
407
+ build(api);
408
+ segs.sort((a2, b2) => a2.start - b2.start);
409
+ const duration = segs.reduce((m2, s) => Math.max(m2, s.end), 0);
410
+ return {
411
+ duration,
412
+ segs,
413
+ evaluate(tMs, ctx) {
414
+ for (const s of segs) {
415
+ if (tMs < s.start) continue;
416
+ const uRaw = Math.min(1, (tMs - s.start) / (s.end - s.start || 1));
417
+ const u = s.ease(Math.max(0, uRaw));
418
+ s.body(ctx, u);
419
+ }
420
+ }
421
+ };
422
+ }
423
+ var __accessCheck = (obj, member, msg) => {
424
+ if (!member.has(obj))
425
+ throw TypeError("Cannot " + msg);
426
+ };
427
+ var __privateGet = (obj, member, getter) => {
428
+ __accessCheck(obj, member, "read from private field");
429
+ return getter ? getter.call(obj) : member.get(obj);
430
+ };
431
+ var __privateAdd = (obj, member, value) => {
432
+ if (member.has(obj))
433
+ throw TypeError("Cannot add the same private member more than once");
434
+ member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
435
+ };
436
+ var __privateSet = (obj, member, value, setter) => {
437
+ __accessCheck(obj, member, "write to private field");
438
+ member.set(obj, value);
439
+ return value;
440
+ };
441
+ var __privateWrapper = (obj, member, setter, getter) => ({
442
+ set _(value) {
443
+ __privateSet(obj, member, value);
444
+ },
445
+ get _() {
446
+ return __privateGet(obj, member, getter);
447
+ }
448
+ });
449
+ var __privateMethod = (obj, member, method) => {
450
+ __accessCheck(obj, member, "access private method");
451
+ return method;
452
+ };
453
+ var bytes = new Uint8Array(8);
454
+ var view = new DataView(bytes.buffer);
455
+ var u8 = (value) => {
456
+ return [(value % 256 + 256) % 256];
457
+ };
458
+ var u16 = (value) => {
459
+ view.setUint16(0, value, false);
460
+ return [bytes[0], bytes[1]];
461
+ };
462
+ var i16 = (value) => {
463
+ view.setInt16(0, value, false);
464
+ return [bytes[0], bytes[1]];
465
+ };
466
+ var u24 = (value) => {
467
+ view.setUint32(0, value, false);
468
+ return [bytes[1], bytes[2], bytes[3]];
469
+ };
470
+ var u32 = (value) => {
471
+ view.setUint32(0, value, false);
472
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
473
+ };
474
+ var i32 = (value) => {
475
+ view.setInt32(0, value, false);
476
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
477
+ };
478
+ var u64 = (value) => {
479
+ view.setUint32(0, Math.floor(value / 2 ** 32), false);
480
+ view.setUint32(4, value, false);
481
+ return [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7]];
482
+ };
483
+ var fixed_8_8 = (value) => {
484
+ view.setInt16(0, 2 ** 8 * value, false);
485
+ return [bytes[0], bytes[1]];
486
+ };
487
+ var fixed_16_16 = (value) => {
488
+ view.setInt32(0, 2 ** 16 * value, false);
489
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
490
+ };
491
+ var fixed_2_30 = (value) => {
492
+ view.setInt32(0, 2 ** 30 * value, false);
493
+ return [bytes[0], bytes[1], bytes[2], bytes[3]];
494
+ };
495
+ var ascii = (text, nullTerminated = false) => {
496
+ let bytes2 = Array(text.length).fill(null).map((_, i2) => text.charCodeAt(i2));
497
+ if (nullTerminated)
498
+ bytes2.push(0);
499
+ return bytes2;
500
+ };
501
+ var last = (arr) => {
502
+ return arr && arr[arr.length - 1];
503
+ };
504
+ var lastPresentedSample = (samples) => {
505
+ let result = void 0;
506
+ for (let sample of samples) {
507
+ if (!result || sample.presentationTimestamp > result.presentationTimestamp) {
508
+ result = sample;
509
+ }
510
+ }
511
+ return result;
512
+ };
513
+ var intoTimescale = (timeInSeconds, timescale, round = true) => {
514
+ let value = timeInSeconds * timescale;
515
+ return round ? Math.round(value) : value;
516
+ };
517
+ var rotationMatrix = (rotationInDegrees) => {
518
+ let theta = rotationInDegrees * (Math.PI / 180);
519
+ let cosTheta = Math.cos(theta);
520
+ let sinTheta = Math.sin(theta);
521
+ return [
522
+ cosTheta,
523
+ sinTheta,
524
+ 0,
525
+ -sinTheta,
526
+ cosTheta,
527
+ 0,
528
+ 0,
529
+ 0,
530
+ 1
531
+ ];
532
+ };
533
+ var IDENTITY_MATRIX = rotationMatrix(0);
534
+ var matrixToBytes = (matrix) => {
535
+ return [
536
+ fixed_16_16(matrix[0]),
537
+ fixed_16_16(matrix[1]),
538
+ fixed_2_30(matrix[2]),
539
+ fixed_16_16(matrix[3]),
540
+ fixed_16_16(matrix[4]),
541
+ fixed_2_30(matrix[5]),
542
+ fixed_16_16(matrix[6]),
543
+ fixed_16_16(matrix[7]),
544
+ fixed_2_30(matrix[8])
545
+ ];
546
+ };
547
+ var deepClone = (x) => {
548
+ if (!x)
549
+ return x;
550
+ if (typeof x !== "object")
551
+ return x;
552
+ if (Array.isArray(x))
553
+ return x.map(deepClone);
554
+ return Object.fromEntries(Object.entries(x).map(([key, value]) => [key, deepClone(value)]));
555
+ };
556
+ var isU32 = (value) => {
557
+ return value >= 0 && value < 2 ** 32;
558
+ };
559
+ var box = (type, contents, children) => ({
560
+ type,
561
+ contents: contents && new Uint8Array(contents.flat(10)),
562
+ children
563
+ });
564
+ var fullBox = (type, version, flags, contents, children) => box(
565
+ type,
566
+ [u8(version), u24(flags), contents ?? []],
567
+ children
568
+ );
569
+ var ftyp = (details) => {
570
+ let minorVersion = 512;
571
+ if (details.fragmented)
572
+ return box("ftyp", [
573
+ ascii("iso5"),
574
+ // Major brand
575
+ u32(minorVersion),
576
+ // Minor version
577
+ // Compatible brands
578
+ ascii("iso5"),
579
+ ascii("iso6"),
580
+ ascii("mp41")
581
+ ]);
582
+ return box("ftyp", [
583
+ ascii("isom"),
584
+ // Major brand
585
+ u32(minorVersion),
586
+ // Minor version
587
+ // Compatible brands
588
+ ascii("isom"),
589
+ details.holdsAvc ? ascii("avc1") : [],
590
+ ascii("mp41")
591
+ ]);
592
+ };
593
+ var mdat = (reserveLargeSize) => ({ type: "mdat", largeSize: reserveLargeSize });
594
+ var free = (size) => ({ type: "free", size });
595
+ var moov = (tracks, creationTime, fragmented = false) => box("moov", null, [
596
+ mvhd(creationTime, tracks),
597
+ ...tracks.map((x) => trak(x, creationTime)),
598
+ fragmented ? mvex(tracks) : null
599
+ ]);
600
+ var mvhd = (creationTime, tracks) => {
601
+ let duration = intoTimescale(Math.max(
602
+ 0,
603
+ ...tracks.filter((x) => x.samples.length > 0).map((x) => {
604
+ const lastSample = lastPresentedSample(x.samples);
605
+ return lastSample.presentationTimestamp + lastSample.duration;
606
+ })
607
+ ), GLOBAL_TIMESCALE);
608
+ let nextTrackId = Math.max(...tracks.map((x) => x.id)) + 1;
609
+ let needsU64 = !isU32(creationTime) || !isU32(duration);
610
+ let u32OrU64 = needsU64 ? u64 : u32;
611
+ return fullBox("mvhd", +needsU64, 0, [
612
+ u32OrU64(creationTime),
613
+ // Creation time
614
+ u32OrU64(creationTime),
615
+ // Modification time
616
+ u32(GLOBAL_TIMESCALE),
617
+ // Timescale
618
+ u32OrU64(duration),
619
+ // Duration
620
+ fixed_16_16(1),
621
+ // Preferred rate
622
+ fixed_8_8(1),
623
+ // Preferred volume
624
+ Array(10).fill(0),
625
+ // Reserved
626
+ matrixToBytes(IDENTITY_MATRIX),
627
+ // Matrix
628
+ Array(24).fill(0),
629
+ // Pre-defined
630
+ u32(nextTrackId)
631
+ // Next track ID
632
+ ]);
633
+ };
634
+ var trak = (track, creationTime) => box("trak", null, [
635
+ tkhd(track, creationTime),
636
+ mdia(track, creationTime)
637
+ ]);
638
+ var tkhd = (track, creationTime) => {
639
+ let lastSample = lastPresentedSample(track.samples);
640
+ let durationInGlobalTimescale = intoTimescale(
641
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
642
+ GLOBAL_TIMESCALE
643
+ );
644
+ let needsU64 = !isU32(creationTime) || !isU32(durationInGlobalTimescale);
645
+ let u32OrU64 = needsU64 ? u64 : u32;
646
+ let matrix;
647
+ if (track.info.type === "video") {
648
+ matrix = typeof track.info.rotation === "number" ? rotationMatrix(track.info.rotation) : track.info.rotation;
649
+ } else {
650
+ matrix = IDENTITY_MATRIX;
651
+ }
652
+ return fullBox("tkhd", +needsU64, 3, [
653
+ u32OrU64(creationTime),
654
+ // Creation time
655
+ u32OrU64(creationTime),
656
+ // Modification time
657
+ u32(track.id),
658
+ // Track ID
659
+ u32(0),
660
+ // Reserved
661
+ u32OrU64(durationInGlobalTimescale),
662
+ // Duration
663
+ Array(8).fill(0),
664
+ // Reserved
665
+ u16(0),
666
+ // Layer
667
+ u16(0),
668
+ // Alternate group
669
+ fixed_8_8(track.info.type === "audio" ? 1 : 0),
670
+ // Volume
671
+ u16(0),
672
+ // Reserved
673
+ matrixToBytes(matrix),
674
+ // Matrix
675
+ fixed_16_16(track.info.type === "video" ? track.info.width : 0),
676
+ // Track width
677
+ fixed_16_16(track.info.type === "video" ? track.info.height : 0)
678
+ // Track height
679
+ ]);
680
+ };
681
+ var mdia = (track, creationTime) => box("mdia", null, [
682
+ mdhd(track, creationTime),
683
+ hdlr(track.info.type === "video" ? "vide" : "soun"),
684
+ minf(track)
685
+ ]);
686
+ var mdhd = (track, creationTime) => {
687
+ let lastSample = lastPresentedSample(track.samples);
688
+ let localDuration = intoTimescale(
689
+ lastSample ? lastSample.presentationTimestamp + lastSample.duration : 0,
690
+ track.timescale
691
+ );
692
+ let needsU64 = !isU32(creationTime) || !isU32(localDuration);
693
+ let u32OrU64 = needsU64 ? u64 : u32;
694
+ return fullBox("mdhd", +needsU64, 0, [
695
+ u32OrU64(creationTime),
696
+ // Creation time
697
+ u32OrU64(creationTime),
698
+ // Modification time
699
+ u32(track.timescale),
700
+ // Timescale
701
+ u32OrU64(localDuration),
702
+ // Duration
703
+ u16(21956),
704
+ // Language ("und", undetermined)
705
+ u16(0)
706
+ // Quality
707
+ ]);
708
+ };
709
+ var hdlr = (componentSubtype) => fullBox("hdlr", 0, 0, [
710
+ ascii("mhlr"),
711
+ // Component type
712
+ ascii(componentSubtype),
713
+ // Component subtype
714
+ u32(0),
715
+ // Component manufacturer
716
+ u32(0),
717
+ // Component flags
718
+ u32(0),
719
+ // Component flags mask
720
+ ascii("mp4-muxer-hdlr", true)
721
+ // Component name
722
+ ]);
723
+ var minf = (track) => box("minf", null, [
724
+ track.info.type === "video" ? vmhd() : smhd(),
725
+ dinf(),
726
+ stbl(track)
727
+ ]);
728
+ var vmhd = () => fullBox("vmhd", 0, 1, [
729
+ u16(0),
730
+ // Graphics mode
731
+ u16(0),
732
+ // Opcolor R
733
+ u16(0),
734
+ // Opcolor G
735
+ u16(0)
736
+ // Opcolor B
737
+ ]);
738
+ var smhd = () => fullBox("smhd", 0, 0, [
739
+ u16(0),
740
+ // Balance
741
+ u16(0)
742
+ // Reserved
743
+ ]);
744
+ var dinf = () => box("dinf", null, [
745
+ dref()
746
+ ]);
747
+ var dref = () => fullBox("dref", 0, 0, [
748
+ u32(1)
749
+ // Entry count
750
+ ], [
751
+ url()
752
+ ]);
753
+ var url = () => fullBox("url ", 0, 1);
754
+ var stbl = (track) => {
755
+ const needsCtts = track.compositionTimeOffsetTable.length > 1 || track.compositionTimeOffsetTable.some((x) => x.sampleCompositionTimeOffset !== 0);
756
+ return box("stbl", null, [
757
+ stsd(track),
758
+ stts(track),
759
+ stss(track),
760
+ stsc(track),
761
+ stsz(track),
762
+ stco(track),
763
+ needsCtts ? ctts(track) : null
764
+ ]);
765
+ };
766
+ var stsd = (track) => fullBox("stsd", 0, 0, [
767
+ u32(1)
768
+ // Entry count
769
+ ], [
770
+ track.info.type === "video" ? videoSampleDescription(
771
+ VIDEO_CODEC_TO_BOX_NAME[track.info.codec],
772
+ track
773
+ ) : soundSampleDescription(
774
+ AUDIO_CODEC_TO_BOX_NAME[track.info.codec],
775
+ track
776
+ )
777
+ ]);
778
+ var videoSampleDescription = (compressionType, track) => box(compressionType, [
779
+ Array(6).fill(0),
780
+ // Reserved
781
+ u16(1),
782
+ // Data reference index
783
+ u16(0),
784
+ // Pre-defined
785
+ u16(0),
786
+ // Reserved
787
+ Array(12).fill(0),
788
+ // Pre-defined
789
+ u16(track.info.width),
790
+ // Width
791
+ u16(track.info.height),
792
+ // Height
793
+ u32(4718592),
794
+ // Horizontal resolution
795
+ u32(4718592),
796
+ // Vertical resolution
797
+ u32(0),
798
+ // Reserved
799
+ u16(1),
800
+ // Frame count
801
+ Array(32).fill(0),
802
+ // Compressor name
803
+ u16(24),
804
+ // Depth
805
+ i16(65535)
806
+ // Pre-defined
807
+ ], [
808
+ VIDEO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track),
809
+ track.info.decoderConfig.colorSpace ? colr(track) : null
810
+ ]);
811
+ var COLOR_PRIMARIES_MAP = {
812
+ "bt709": 1,
813
+ // ITU-R BT.709
814
+ "bt470bg": 5,
815
+ // ITU-R BT.470BG
816
+ "smpte170m": 6
817
+ // ITU-R BT.601 525 - SMPTE 170M
818
+ };
819
+ var TRANSFER_CHARACTERISTICS_MAP = {
820
+ "bt709": 1,
821
+ // ITU-R BT.709
822
+ "smpte170m": 6,
823
+ // SMPTE 170M
824
+ "iec61966-2-1": 13
825
+ // IEC 61966-2-1
826
+ };
827
+ var MATRIX_COEFFICIENTS_MAP = {
828
+ "rgb": 0,
829
+ // Identity
830
+ "bt709": 1,
831
+ // ITU-R BT.709
832
+ "bt470bg": 5,
833
+ // ITU-R BT.470BG
834
+ "smpte170m": 6
835
+ // SMPTE 170M
836
+ };
837
+ var colr = (track) => box("colr", [
838
+ ascii("nclx"),
839
+ // Colour type
840
+ u16(COLOR_PRIMARIES_MAP[track.info.decoderConfig.colorSpace.primaries]),
841
+ // Colour primaries
842
+ u16(TRANSFER_CHARACTERISTICS_MAP[track.info.decoderConfig.colorSpace.transfer]),
843
+ // Transfer characteristics
844
+ u16(MATRIX_COEFFICIENTS_MAP[track.info.decoderConfig.colorSpace.matrix]),
845
+ // Matrix coefficients
846
+ u8((track.info.decoderConfig.colorSpace.fullRange ? 1 : 0) << 7)
847
+ // Full range flag
848
+ ]);
849
+ var avcC = (track) => track.info.decoderConfig && box("avcC", [
850
+ // For AVC, description is an AVCDecoderConfigurationRecord, so nothing else to do here
851
+ ...new Uint8Array(track.info.decoderConfig.description)
852
+ ]);
853
+ var hvcC = (track) => track.info.decoderConfig && box("hvcC", [
854
+ // For HEVC, description is a HEVCDecoderConfigurationRecord, so nothing else to do here
855
+ ...new Uint8Array(track.info.decoderConfig.description)
856
+ ]);
857
+ var vpcC = (track) => {
858
+ if (!track.info.decoderConfig) {
859
+ return null;
860
+ }
861
+ let decoderConfig = track.info.decoderConfig;
862
+ if (!decoderConfig.colorSpace) {
863
+ throw new Error(`'colorSpace' is required in the decoder config for VP9.`);
864
+ }
865
+ let parts = decoderConfig.codec.split(".");
866
+ let profile = Number(parts[1]);
867
+ let level = Number(parts[2]);
868
+ let bitDepth = Number(parts[3]);
869
+ let chromaSubsampling = 0;
870
+ let thirdByte = (bitDepth << 4) + (chromaSubsampling << 1) + Number(decoderConfig.colorSpace.fullRange);
871
+ let colourPrimaries = 2;
872
+ let transferCharacteristics = 2;
873
+ let matrixCoefficients = 2;
874
+ return fullBox("vpcC", 1, 0, [
875
+ u8(profile),
876
+ // Profile
877
+ u8(level),
878
+ // Level
879
+ u8(thirdByte),
880
+ // Bit depth, chroma subsampling, full range
881
+ u8(colourPrimaries),
882
+ // Colour primaries
883
+ u8(transferCharacteristics),
884
+ // Transfer characteristics
885
+ u8(matrixCoefficients),
886
+ // Matrix coefficients
887
+ u16(0)
888
+ // Codec initialization data size
889
+ ]);
890
+ };
891
+ var av1C = () => {
892
+ let marker = 1;
893
+ let version = 1;
894
+ let firstByte = (marker << 7) + version;
895
+ return box("av1C", [
896
+ firstByte,
897
+ 0,
898
+ 0,
899
+ 0
900
+ ]);
901
+ };
902
+ var soundSampleDescription = (compressionType, track) => box(compressionType, [
903
+ Array(6).fill(0),
904
+ // Reserved
905
+ u16(1),
906
+ // Data reference index
907
+ u16(0),
908
+ // Version
909
+ u16(0),
910
+ // Revision level
911
+ u32(0),
912
+ // Vendor
913
+ u16(track.info.numberOfChannels),
914
+ // Number of channels
915
+ u16(16),
916
+ // Sample size (bits)
917
+ u16(0),
918
+ // Compression ID
919
+ u16(0),
920
+ // Packet size
921
+ fixed_16_16(track.info.sampleRate)
922
+ // Sample rate
923
+ ], [
924
+ AUDIO_CODEC_TO_CONFIGURATION_BOX[track.info.codec](track)
925
+ ]);
926
+ var esds = (track) => {
927
+ let description = new Uint8Array(track.info.decoderConfig.description);
928
+ return fullBox("esds", 0, 0, [
929
+ // https://stackoverflow.com/a/54803118
930
+ u32(58753152),
931
+ // TAG(3) = Object Descriptor ([2])
932
+ u8(32 + description.byteLength),
933
+ // length of this OD (which includes the next 2 tags)
934
+ u16(1),
935
+ // ES_ID = 1
936
+ u8(0),
937
+ // flags etc = 0
938
+ u32(75530368),
939
+ // TAG(4) = ES Descriptor ([2]) embedded in above OD
940
+ u8(18 + description.byteLength),
941
+ // length of this ESD
942
+ u8(64),
943
+ // MPEG-4 Audio
944
+ u8(21),
945
+ // stream type(6bits)=5 audio, flags(2bits)=1
946
+ u24(0),
947
+ // 24bit buffer size
948
+ u32(130071),
949
+ // max bitrate
950
+ u32(130071),
951
+ // avg bitrate
952
+ u32(92307584),
953
+ // TAG(5) = ASC ([2],[3]) embedded in above OD
954
+ u8(description.byteLength),
955
+ // length
956
+ ...description,
957
+ u32(109084800),
958
+ // TAG(6)
959
+ u8(1),
960
+ // length
961
+ u8(2)
962
+ // data
963
+ ]);
964
+ };
965
+ var dOps = (track) => {
966
+ let preskip = 3840;
967
+ let gain = 0;
968
+ const description = track.info.decoderConfig?.description;
969
+ if (description) {
970
+ if (description.byteLength < 18) {
971
+ throw new TypeError("Invalid decoder description provided for Opus; must be at least 18 bytes long.");
972
+ }
973
+ const view2 = ArrayBuffer.isView(description) ? new DataView(description.buffer, description.byteOffset, description.byteLength) : new DataView(description);
974
+ preskip = view2.getUint16(10, true);
975
+ gain = view2.getInt16(14, true);
976
+ }
977
+ return box("dOps", [
978
+ u8(0),
979
+ // Version
980
+ u8(track.info.numberOfChannels),
981
+ // OutputChannelCount
982
+ u16(preskip),
983
+ u32(track.info.sampleRate),
984
+ // InputSampleRate
985
+ fixed_8_8(gain),
986
+ // OutputGain
987
+ u8(0)
988
+ // ChannelMappingFamily
989
+ ]);
990
+ };
991
+ var stts = (track) => {
992
+ return fullBox("stts", 0, 0, [
993
+ u32(track.timeToSampleTable.length),
994
+ // Number of entries
995
+ track.timeToSampleTable.map((x) => [
996
+ // Time-to-sample table
997
+ u32(x.sampleCount),
998
+ // Sample count
999
+ u32(x.sampleDelta)
1000
+ // Sample duration
1001
+ ])
1002
+ ]);
1003
+ };
1004
+ var stss = (track) => {
1005
+ if (track.samples.every((x) => x.type === "key"))
1006
+ return null;
1007
+ let keySamples = [...track.samples.entries()].filter(([, sample]) => sample.type === "key");
1008
+ return fullBox("stss", 0, 0, [
1009
+ u32(keySamples.length),
1010
+ // Number of entries
1011
+ keySamples.map(([index]) => u32(index + 1))
1012
+ // Sync sample table
1013
+ ]);
1014
+ };
1015
+ var stsc = (track) => {
1016
+ return fullBox("stsc", 0, 0, [
1017
+ u32(track.compactlyCodedChunkTable.length),
1018
+ // Number of entries
1019
+ track.compactlyCodedChunkTable.map((x) => [
1020
+ // Sample-to-chunk table
1021
+ u32(x.firstChunk),
1022
+ // First chunk
1023
+ u32(x.samplesPerChunk),
1024
+ // Samples per chunk
1025
+ u32(1)
1026
+ // Sample description index
1027
+ ])
1028
+ ]);
1029
+ };
1030
+ var stsz = (track) => fullBox("stsz", 0, 0, [
1031
+ u32(0),
1032
+ // Sample size (0 means non-constant size)
1033
+ u32(track.samples.length),
1034
+ // Number of entries
1035
+ track.samples.map((x) => u32(x.size))
1036
+ // Sample size table
1037
+ ]);
1038
+ var stco = (track) => {
1039
+ if (track.finalizedChunks.length > 0 && last(track.finalizedChunks).offset >= 2 ** 32) {
1040
+ return fullBox("co64", 0, 0, [
1041
+ u32(track.finalizedChunks.length),
1042
+ // Number of entries
1043
+ track.finalizedChunks.map((x) => u64(x.offset))
1044
+ // Chunk offset table
1045
+ ]);
1046
+ }
1047
+ return fullBox("stco", 0, 0, [
1048
+ u32(track.finalizedChunks.length),
1049
+ // Number of entries
1050
+ track.finalizedChunks.map((x) => u32(x.offset))
1051
+ // Chunk offset table
1052
+ ]);
1053
+ };
1054
+ var ctts = (track) => {
1055
+ return fullBox("ctts", 0, 0, [
1056
+ u32(track.compositionTimeOffsetTable.length),
1057
+ // Number of entries
1058
+ track.compositionTimeOffsetTable.map((x) => [
1059
+ // Time-to-sample table
1060
+ u32(x.sampleCount),
1061
+ // Sample count
1062
+ u32(x.sampleCompositionTimeOffset)
1063
+ // Sample offset
1064
+ ])
1065
+ ]);
1066
+ };
1067
// Movie extends box ('mvex'): marks the file as fragmented; one 'trex' child
// per track carrying that track's fragment defaults.
var mvex = (tracks) => box("mvex", null, tracks.map(trex));
1070
// Track extends box ('trex'): default sample values for this track's
// movie fragments (all zero here; real values live in each 'tfhd').
var trex = (track) => fullBox("trex", 0, 0, [
  u32(track.id), // Track ID
  u32(1), // Default sample description index
  u32(0), // Default sample duration
  u32(0), // Default sample size
  u32(0) // Default sample flags
]);
1084
// Movie fragment box ('moof'): the fragment header followed by one track
// fragment ('traf') per track.
var moof = (sequenceNumber, tracks) => {
  const children = [mfhd(sequenceNumber)];
  for (const track of tracks) {
    children.push(traf(track));
  }
  return box("moof", null, children);
};
1090
// Movie fragment header box ('mfhd'): carries the fragment's sequence number.
var mfhd = (sequenceNumber) => fullBox("mfhd", 0, 0, [
  u32(sequenceNumber) // Sequence number
]);
1096
// Builds the 32-bit sample-flags word used in fragment boxes: delta samples
// are marked as depending on other samples and as non-sync samples; key
// samples as independent sync samples.
var fragmentSampleFlags = (sample) => {
  const isDelta = sample.type === "delta";
  // Bits 24-31: sample_depends_on — 1 = depends on others (delta),
  // 2 = does not depend on others (key).
  const dependsByte = isDelta ? 1 : 2;
  // Bits 16-23: lowest bit is sample_is_non_sync_sample.
  const nonSyncByte = isDelta ? 1 : 0;
  // The two low bytes (degradation priority etc.) remain zero.
  return dependsByte << 24 | nonSyncByte << 16;
};
1110
// Track fragment box ('traf'): header, base decode time and sample run for
// one track within a movie fragment.
var traf = (track) => box("traf", null, [
  tfhd(track), // Track fragment header
  tfdt(track), // Base media decode time
  trun(track) // Sample run table
]);
1117
// Track fragment header box ('tfhd'): declares default sample duration, size
// and flags for the current fragment, taken from a reference sample.
var tfhd = (track) => {
  // tf_flags: default-sample-duration (0x8), default-sample-size (0x10) and
  // default-sample-flags (0x20) present, plus default-base-is-moof (0x20000).
  const tfFlags = 8 | 16 | 32 | 131072;
  // Prefer the second sample of the chunk as the reference, falling back to
  // the first when the chunk holds only one sample.
  const samples = track.currentChunk.samples;
  const referenceSample = samples[1] ?? samples[0];
  const defaultDuration = referenceSample.timescaleUnitsToNextSample;
  const defaultSize = referenceSample.size;
  const defaultFlags = fragmentSampleFlags(referenceSample);
  return fullBox("tfhd", 0, tfFlags, [
    u32(track.id), // Track ID
    u32(defaultDuration), // Default sample duration
    u32(defaultSize), // Default sample size
    u32(defaultFlags) // Default sample flags
  ]);
};
1140
// Track fragment decode time box ('tfdt', version 1 => 64-bit field):
// decode timestamp of the fragment's first sample, in track timescale units.
var tfdt = (track) => fullBox("tfdt", 1, 0, [
  u64(intoTimescale(track.currentChunk.startTimestamp, track.timescale)) // Base media decode time
]);
1146
// Track run box ('trun', version 1): the per-sample table for the current
// chunk. Each optional column (duration, size, flags, composition offset) is
// emitted only when its values actually vary; otherwise the 'tfhd' defaults
// apply, keeping the box compact.
var trun = (track) => {
  const samples = track.currentChunk.samples;
  const durations = samples.map((s) => s.timescaleUnitsToNextSample);
  const sizes = samples.map((s) => s.size);
  const sampleFlags = samples.map(fragmentSampleFlags);
  const compositionOffsets = samples.map(
    (s) => intoTimescale(s.presentationTimestamp - s.decodeTimestamp, track.timescale)
  );
  const uniqueDurations = new Set(durations);
  const uniqueSizes = new Set(sizes);
  const uniqueFlags = new Set(sampleFlags);
  const uniqueCompositionOffsets = new Set(compositionOffsets);
  // If exactly the first sample's flags differ from all the rest, use the
  // dedicated first-sample-flags field instead of a per-sample flags column.
  const firstSampleFlagsPresent = uniqueFlags.size === 2 && sampleFlags[0] !== sampleFlags[1];
  const sampleDurationPresent = uniqueDurations.size > 1;
  const sampleSizePresent = uniqueSizes.size > 1;
  const sampleFlagsPresent = !firstSampleFlagsPresent && uniqueFlags.size > 1;
  const sampleCompositionTimeOffsetsPresent =
    uniqueCompositionOffsets.size > 1 || [...uniqueCompositionOffsets].some((x) => x !== 0);
  let flags = 0;
  flags |= 1; // Data offset present
  flags |= 4 * +firstSampleFlagsPresent; // First sample flags present
  flags |= 256 * +sampleDurationPresent; // Sample duration present
  flags |= 512 * +sampleSizePresent; // Sample size present
  flags |= 1024 * +sampleFlagsPresent; // Sample flags present
  flags |= 2048 * +sampleCompositionTimeOffsetsPresent; // Composition offsets present
  return fullBox("trun", 1, flags, [
    u32(samples.length), // Sample count
    u32(track.currentChunk.offset - track.currentChunk.moofOffset || 0), // Data offset
    firstSampleFlagsPresent ? u32(sampleFlags[0]) : [], // First sample flags
    samples.map((_, index) => [
      sampleDurationPresent ? u32(durations[index]) : [], // Sample duration
      sampleSizePresent ? u32(sizes[index]) : [], // Sample size
      sampleFlagsPresent ? u32(sampleFlags[index]) : [], // Sample flags
      // Composition time offset (signed, version 1)
      sampleCompositionTimeOffsetsPresent ? i32(compositionOffsets[index]) : []
    ])
  ]);
};
1185
// Movie fragment random access box ('mfra'): one 'tfra' per track, followed
// by the mandatory trailing 'mfro'.
var mfra = (tracks) => box("mfra", null, [...tracks.map(tfra), mfro()]);
1191
// Track fragment random access box ('tfra', version 1 => 64-bit time/offset
// fields): one random access entry per finalized chunk, each pointing at the
// first sample of the first trun in that chunk's moof.
var tfra = (track, trackIndex) => {
  return fullBox("tfra", 1, 0, [
    u32(track.id), // Track ID
    u32(63), // traf number, trun number and sample number are all 32-bit ints
    u32(track.finalizedChunks.length), // Entry count
    track.finalizedChunks.map((chunk) => [
      u64(intoTimescale(chunk.startTimestamp, track.timescale)), // Time
      u64(chunk.moofOffset), // moof offset
      u32(trackIndex + 1), // traf number
      u32(1), // trun number
      u32(1) // Sample number
    ])
  ]);
};
1214
// Movie fragment random access offset box ('mfro'). The size field written
// here is a placeholder; the caller patches it once the actual size of the
// enclosing 'mfra' box is known.
var mfro = () => fullBox("mfro", 0, 0, [
  u32(0) // Size (overwritten externally)
]);
1222
// Sample entry box name for each supported video codec.
var VIDEO_CODEC_TO_BOX_NAME = {
  avc: "avc1",
  hevc: "hvc1",
  vp9: "vp09",
  av1: "av01"
};
// Decoder configuration box builder for each supported video codec.
var VIDEO_CODEC_TO_CONFIGURATION_BOX = {
  avc: avcC,
  hevc: hvcC,
  vp9: vpcC,
  av1: av1C
};
// Sample entry box name for each supported audio codec.
var AUDIO_CODEC_TO_BOX_NAME = {
  aac: "mp4a",
  opus: "Opus"
};
// Decoder configuration box builder for each supported audio codec.
var AUDIO_CODEC_TO_CONFIGURATION_BOX = {
  aac: esds,
  opus: dOps
};
1242
// Abstract base class for all muxer output targets.
var Target = class {
};
// Target that accumulates the finished file in memory; `buffer` is populated
// by the writer on finalize().
var ArrayBufferTarget = class extends Target {
  constructor() {
    super(...arguments);
    this.buffer = null; // Set once the file has been finalized
  }
};
// Target that streams output through an `onData(data, position)` callback,
// optionally coalescing writes into large chunks.
var StreamTarget = class extends Target {
  constructor(options) {
    super();
    this.options = options;
    if (typeof options !== "object") {
      throw new TypeError("StreamTarget requires an options object to be passed to its constructor.");
    }
    const { onData, chunked, chunkSize } = options;
    if (onData) {
      if (typeof onData !== "function") {
        throw new TypeError("options.onData, when provided, must be a function.");
      }
      // Require the position parameter: it is essential for correct output.
      if (onData.length < 2) {
        throw new TypeError(
          "options.onData, when provided, must be a function that takes in at least two arguments (data and position). Ignoring the position argument, which specifies the byte offset at which the data is to be written, can lead to broken outputs."
        );
      }
    }
    if (chunked !== undefined && typeof chunked !== "boolean") {
      throw new TypeError("options.chunked, when provided, must be a boolean.");
    }
    if (chunkSize !== undefined && (!Number.isInteger(chunkSize) || chunkSize < 1024)) {
      throw new TypeError("options.chunkSize, when provided, must be an integer and not smaller than 1024.");
    }
  }
};
// Target that writes into a FileSystemWritableFileStream (File System
// Access API).
var FileSystemWritableFileStreamTarget = class extends Target {
  constructor(stream, options) {
    super();
    this.stream = stream;
    this.options = options;
    if (!(stream instanceof FileSystemWritableFileStream)) {
      throw new TypeError("FileSystemWritableFileStreamTarget requires a FileSystemWritableFileStream instance.");
    }
    if (options !== undefined && typeof options !== "object") {
      throw new TypeError("FileSystemWritableFileStreamTarget's options, when provided, must be an object.");
    }
    if (options) {
      if (options.chunkSize !== undefined && (!Number.isInteger(options.chunkSize) || options.chunkSize <= 0)) {
        throw new TypeError("options.chunkSize, when provided, must be a positive integer");
      }
    }
  }
};
1293
// Low-level binary writer. Subclasses implement write(data) to receive raw
// bytes; this base class tracks the write position and layers big-endian
// integer, ASCII and MP4 box serialization on top. (Rewritten with native
// #private fields instead of the lowered WeakMap helpers.)
var Writer = class {
  // 8-byte scratch buffer (plus a DataView over it), reused by the integer
  // and ASCII writers to avoid per-call allocations.
  #helper = new Uint8Array(8);
  #helperView = new DataView(this.#helper.buffer);
  constructor() {
    this.pos = 0;
    // File offset at which each box was written, for later in-place patching.
    this.offsets = /* @__PURE__ */ new WeakMap();
  }
  /** Sets the current position for future writes to a new one. */
  seek(newPos) {
    this.pos = newPos;
  }
  /** Writes a 32-bit big-endian unsigned integer. */
  writeU32(value) {
    this.#helperView.setUint32(0, value, false);
    this.write(this.#helper.subarray(0, 4));
  }
  /** Writes a 64-bit big-endian unsigned integer as two 32-bit halves. */
  writeU64(value) {
    this.#helperView.setUint32(0, Math.floor(value / 2 ** 32), false);
    this.#helperView.setUint32(4, value, false);
    this.write(this.#helper.subarray(0, 8));
  }
  /** Writes a string as raw ASCII bytes, staged through the scratch buffer. */
  writeAscii(text) {
    for (let i = 0; i < text.length; i++) {
      this.#helperView.setUint8(i % 8, text.charCodeAt(i));
      if (i % 8 === 7) this.write(this.#helper);
    }
    if (text.length % 8 !== 0) {
      this.write(this.#helper.subarray(0, text.length % 8));
    }
  }
  /** Serializes a box (header, contents, children), recording its offset. */
  writeBox(box2) {
    this.offsets.set(box2, this.pos);
    if (box2.contents && !box2.children) {
      // Leaf box: the size is known up front.
      this.writeBoxHeader(box2, box2.size ?? box2.contents.byteLength + 8);
      this.write(box2.contents);
    } else {
      // Container box: write a placeholder header, then the children, then
      // seek back and patch in the real size.
      const startPos = this.pos;
      this.writeBoxHeader(box2, 0);
      if (box2.contents) this.write(box2.contents);
      if (box2.children) {
        for (const child of box2.children) {
          if (child) this.writeBox(child);
        }
      }
      const endPos = this.pos;
      const size = box2.size ?? endPos - startPos;
      this.seek(startPos);
      this.writeBoxHeader(box2, size);
      this.seek(endPos);
    }
  }
  /** Writes a box header; "large" boxes store 1 in the 32-bit size field and the real size as a 64-bit int after the type. */
  writeBoxHeader(box2, size) {
    this.writeU32(box2.largeSize ? 1 : size);
    this.writeAscii(box2.type);
    if (box2.largeSize) this.writeU64(size);
  }
  /** Size in bytes of a box's header. */
  measureBoxHeader(box2) {
    return 8 + (box2.largeSize ? 8 : 0);
  }
  /** Re-serializes a previously written box at its original offset. */
  patchBox(box2) {
    const returnPos = this.pos;
    this.seek(this.offsets.get(box2));
    this.writeBox(box2);
    this.seek(returnPos);
  }
  /** Computes the serialized byte size of a box without writing it. */
  measureBox(box2) {
    if (box2.contents && !box2.children) {
      return this.measureBoxHeader(box2) + box2.contents.byteLength;
    }
    let result = this.measureBoxHeader(box2);
    if (box2.contents) result += box2.contents.byteLength;
    if (box2.children) {
      for (const child of box2.children) {
        if (child) result += this.measureBox(child);
      }
    }
    return result;
  }
};
1380
// Writer that accumulates all output in a growing in-memory buffer and hands
// the trimmed result to its ArrayBufferTarget on finalize(). (Rewritten with
// native #private fields instead of the lowered WeakMap helpers.)
var ArrayBufferTargetWriter = class extends Writer {
  #target;
  #buffer = new ArrayBuffer(2 ** 16); // Initial capacity: 64 KiB
  #bytes = new Uint8Array(this.#buffer);
  #maxPos = 0; // Highest position ever written; the output is trimmed to it
  constructor(target) {
    super();
    this.#target = target;
  }
  // Grows the backing buffer (doubling) until it can hold `size` bytes.
  #ensureSize(size) {
    let newLength = this.#buffer.byteLength;
    while (newLength < size) newLength *= 2;
    if (newLength === this.#buffer.byteLength) return;
    const newBuffer = new ArrayBuffer(newLength);
    const newBytes = new Uint8Array(newBuffer);
    newBytes.set(this.#bytes, 0);
    this.#buffer = newBuffer;
    this.#bytes = newBytes;
  }
  write(data) {
    this.#ensureSize(this.pos + data.byteLength);
    this.#bytes.set(data, this.pos);
    this.pos += data.byteLength;
    this.#maxPos = Math.max(this.#maxPos, this.pos);
  }
  finalize() {
    this.#ensureSize(this.pos);
    this.#target.buffer = this.#buffer.slice(0, Math.max(this.#maxPos, this.pos));
  }
};
1419
// Default chunk size in chunked streaming mode: 16 MiB.
var DEFAULT_CHUNK_SIZE = 2 ** 24;
// Maximum number of in-flight chunks before older ones are force-flushed.
var MAX_CHUNKS_AT_ONCE = 2;
// Writer that forwards data to a StreamTarget's onData callback. Individual
// writes are buffered as "sections" until flush(); in chunked mode, flushed
// data is further coalesced into fixed-size chunks that are emitted once
// fully written or once too many accumulate. (Rewritten with native #private
// members instead of the lowered WeakMap/WeakSet helpers.)
var StreamTargetWriter = class extends Writer {
  #target;
  #sections = []; // Raw writes ({ data, start }) recorded since the last flush
  #chunked;
  #chunkSize;
  #chunks = []; // In-flight chunks, kept sorted by start offset (chunked mode)
  constructor(target) {
    super();
    this.#target = target;
    this.#chunked = target.options?.chunked ?? false;
    this.#chunkSize = target.options?.chunkSize ?? DEFAULT_CHUNK_SIZE;
  }
  write(data) {
    this.#sections.push({
      data: data.slice(), // Copy: the caller may reuse its buffer
      start: this.pos
    });
    this.pos += data.byteLength;
  }
  /** Coalesces buffered sections into contiguous runs and emits them. */
  flush() {
    if (this.#sections.length === 0) return;
    const runs = [];
    const sorted = [...this.#sections].sort((x, y) => x.start - y.start);
    runs.push({
      start: sorted[0].start,
      size: sorted[0].data.byteLength
    });
    // Merge overlapping/adjacent sections into contiguous runs.
    for (let i = 1; i < sorted.length; i++) {
      const lastRun = runs[runs.length - 1];
      const section = sorted[i];
      if (section.start <= lastRun.start + lastRun.size) {
        lastRun.size = Math.max(lastRun.size, section.start + section.data.byteLength - lastRun.start);
      } else {
        runs.push({
          start: section.start,
          size: section.data.byteLength
        });
      }
    }
    for (const run of runs) {
      run.data = new Uint8Array(run.size);
      // Replay every section that falls inside this run (later writes win).
      for (const section of this.#sections) {
        if (run.start <= section.start && section.start < run.start + run.size) {
          run.data.set(section.data, section.start - run.start);
        }
      }
      if (this.#chunked) {
        this.#writeDataIntoChunks(run.data, run.start);
        this.#flushChunks();
      } else {
        this.#target.options.onData?.(run.data, run.start);
      }
    }
    this.#sections.length = 0;
  }
  finalize() {
    if (this.#chunked) {
      this.#flushChunks(true); // Emit everything that is still buffered
    }
  }
  // Copies `data` into the chunk containing `position`, spilling any
  // remainder recursively into the following chunk(s).
  #writeDataIntoChunks(data, position) {
    let chunkIndex = this.#chunks.findIndex((x) => x.start <= position && position < x.start + this.#chunkSize);
    if (chunkIndex === -1) chunkIndex = this.#createChunk(position);
    const chunk = this.#chunks[chunkIndex];
    const relativePosition = position - chunk.start;
    const toWrite = data.subarray(0, Math.min(this.#chunkSize - relativePosition, data.byteLength));
    chunk.data.set(toWrite, relativePosition);
    this.#insertSectionIntoChunk(chunk, {
      start: relativePosition,
      end: relativePosition + toWrite.byteLength
    });
    // A single written range covering the entire chunk means it's complete.
    if (chunk.written[0].start === 0 && chunk.written[0].end === this.#chunkSize) {
      chunk.shouldFlush = true;
    }
    if (this.#chunks.length > MAX_CHUNKS_AT_ONCE) {
      // Too many chunks in flight: flush all but the newest.
      for (let i = 0; i < this.#chunks.length - 1; i++) {
        this.#chunks[i].shouldFlush = true;
      }
      this.#flushChunks();
    }
    if (toWrite.byteLength < data.byteLength) {
      this.#writeDataIntoChunks(data.subarray(toWrite.byteLength), position + toWrite.byteLength);
    }
  }
  // Inserts a written range into chunk.written (kept sorted by start) via
  // binary search, then merges ranges that now touch or overlap.
  #insertSectionIntoChunk(chunk, section) {
    let low = 0;
    let high = chunk.written.length - 1;
    let index = -1;
    while (low <= high) {
      const mid = Math.floor(low + (high - low + 1) / 2);
      if (chunk.written[mid].start <= section.start) {
        low = mid + 1;
        index = mid;
      } else {
        high = mid - 1;
      }
    }
    chunk.written.splice(index + 1, 0, section);
    if (index === -1 || chunk.written[index].end < section.start) index++;
    while (index < chunk.written.length - 1 && chunk.written[index].end >= chunk.written[index + 1].start) {
      chunk.written[index].end = Math.max(chunk.written[index].end, chunk.written[index + 1].end);
      chunk.written.splice(index + 1, 1);
    }
  }
  // Allocates a new chunk aligned to the chunk-size grid; keeps #chunks
  // sorted and returns the new chunk's index.
  #createChunk(includesPosition) {
    const start = Math.floor(includesPosition / this.#chunkSize) * this.#chunkSize;
    const chunk = {
      start,
      data: new Uint8Array(this.#chunkSize),
      written: [],
      shouldFlush: false
    };
    this.#chunks.push(chunk);
    this.#chunks.sort((x, y) => x.start - y.start);
    return this.#chunks.indexOf(chunk);
  }
  // Emits the written ranges of all flushable chunks (all chunks if forced)
  // and removes them from the in-flight list.
  #flushChunks(force = false) {
    for (let i = 0; i < this.#chunks.length; i++) {
      const chunk = this.#chunks[i];
      if (!chunk.shouldFlush && !force) continue;
      for (const section of chunk.written) {
        this.#target.options.onData?.(
          chunk.data.subarray(section.start, section.end),
          chunk.start + section.start
        );
      }
      this.#chunks.splice(i--, 1);
    }
  }
};
1570
// Writer that pipes output into a FileSystemWritableFileStream by adapting it
// to a chunked StreamTarget: each flushed chunk section becomes one
// positioned write() call on the underlying stream.
var FileSystemWritableFileStreamTargetWriter = class extends StreamTargetWriter {
  constructor(target) {
    const adapter = new StreamTarget({
      onData: (data, position) => target.stream.write({
        type: "write",
        data,
        position
      }),
      chunked: true, // Coalesce many small writes into fewer large ones
      chunkSize: target.options?.chunkSize
    });
    super(adapter);
  }
};
1583
// Timescale of the overall movie, in units per second.
var GLOBAL_TIMESCALE = 1e3;
// Codec identifiers accepted for video and audio tracks.
var SUPPORTED_VIDEO_CODECS = ["avc", "hevc", "vp9", "av1"];
var SUPPORTED_AUDIO_CODECS = ["aac", "opus"];
// Seconds between the MP4 epoch (1904-01-01) and the Unix epoch (1970-01-01);
// added to Unix timestamps to produce MP4 creation times.
var TIMESTAMP_OFFSET = 2082844800;
// Accepted values for the firstTimestampBehavior option.
var FIRST_TIMESTAMP_BEHAVIORS = ["strict", "offset", "cross-track-offset"];
1588
+ var _options, _writer, _ftypSize, _mdat, _videoTrack, _audioTrack, _creationTime, _finalizedChunks, _nextFragmentNumber, _videoSampleQueue, _audioSampleQueue, _finalized, _validateOptions, validateOptions_fn, _writeHeader, writeHeader_fn, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn, _prepareTracks, prepareTracks_fn, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn, _createSampleForTrack, createSampleForTrack_fn, _addSampleToTrack, addSampleToTrack_fn, _validateTimestamp, validateTimestamp_fn, _finalizeCurrentChunk, finalizeCurrentChunk_fn, _finalizeFragment, finalizeFragment_fn, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn, _ensureNotFinalized, ensureNotFinalized_fn;
1589
+ var Muxer = class {
1590
+ constructor(options) {
1591
+ __privateAdd(this, _validateOptions);
1592
+ __privateAdd(this, _writeHeader);
1593
+ __privateAdd(this, _computeMoovSizeUpperBound);
1594
+ __privateAdd(this, _prepareTracks);
1595
+ __privateAdd(this, _generateMpeg4AudioSpecificConfig);
1596
+ __privateAdd(this, _createSampleForTrack);
1597
+ __privateAdd(this, _addSampleToTrack);
1598
+ __privateAdd(this, _validateTimestamp);
1599
+ __privateAdd(this, _finalizeCurrentChunk);
1600
+ __privateAdd(this, _finalizeFragment);
1601
+ __privateAdd(this, _maybeFlushStreamingTargetWriter);
1602
+ __privateAdd(this, _ensureNotFinalized);
1603
+ __privateAdd(this, _options, void 0);
1604
+ __privateAdd(this, _writer, void 0);
1605
+ __privateAdd(this, _ftypSize, void 0);
1606
+ __privateAdd(this, _mdat, void 0);
1607
+ __privateAdd(this, _videoTrack, null);
1608
+ __privateAdd(this, _audioTrack, null);
1609
+ __privateAdd(this, _creationTime, Math.floor(Date.now() / 1e3) + TIMESTAMP_OFFSET);
1610
+ __privateAdd(this, _finalizedChunks, []);
1611
+ __privateAdd(this, _nextFragmentNumber, 1);
1612
+ __privateAdd(this, _videoSampleQueue, []);
1613
+ __privateAdd(this, _audioSampleQueue, []);
1614
+ __privateAdd(this, _finalized, false);
1615
+ __privateMethod(this, _validateOptions, validateOptions_fn).call(this, options);
1616
+ options.video = deepClone(options.video);
1617
+ options.audio = deepClone(options.audio);
1618
+ options.fastStart = deepClone(options.fastStart);
1619
+ this.target = options.target;
1620
+ __privateSet(this, _options, {
1621
+ firstTimestampBehavior: "strict",
1622
+ ...options
1623
+ });
1624
+ if (options.target instanceof ArrayBufferTarget) {
1625
+ __privateSet(this, _writer, new ArrayBufferTargetWriter(options.target));
1626
+ } else if (options.target instanceof StreamTarget) {
1627
+ __privateSet(this, _writer, new StreamTargetWriter(options.target));
1628
+ } else if (options.target instanceof FileSystemWritableFileStreamTarget) {
1629
+ __privateSet(this, _writer, new FileSystemWritableFileStreamTargetWriter(options.target));
1630
+ } else {
1631
+ throw new Error(`Invalid target: ${options.target}`);
1632
+ }
1633
+ __privateMethod(this, _prepareTracks, prepareTracks_fn).call(this);
1634
+ __privateMethod(this, _writeHeader, writeHeader_fn).call(this);
1635
+ }
1636
+ addVideoChunk(sample, meta, timestamp, compositionTimeOffset) {
1637
+ if (!(sample instanceof EncodedVideoChunk)) {
1638
+ throw new TypeError("addVideoChunk's first argument (sample) must be of type EncodedVideoChunk.");
1639
+ }
1640
+ if (meta && typeof meta !== "object") {
1641
+ throw new TypeError("addVideoChunk's second argument (meta), when provided, must be an object.");
1642
+ }
1643
+ if (timestamp !== void 0 && (!Number.isFinite(timestamp) || timestamp < 0)) {
1644
+ throw new TypeError(
1645
+ "addVideoChunk's third argument (timestamp), when provided, must be a non-negative real number."
1646
+ );
1647
+ }
1648
+ if (compositionTimeOffset !== void 0 && !Number.isFinite(compositionTimeOffset)) {
1649
+ throw new TypeError(
1650
+ "addVideoChunk's fourth argument (compositionTimeOffset), when provided, must be a real number."
1651
+ );
1652
+ }
1653
+ let data = new Uint8Array(sample.byteLength);
1654
+ sample.copyTo(data);
1655
+ this.addVideoChunkRaw(
1656
+ data,
1657
+ sample.type,
1658
+ timestamp ?? sample.timestamp,
1659
+ sample.duration,
1660
+ meta,
1661
+ compositionTimeOffset
1662
+ );
1663
+ }
1664
+ addVideoChunkRaw(data, type, timestamp, duration, meta, compositionTimeOffset) {
1665
+ if (!(data instanceof Uint8Array)) {
1666
+ throw new TypeError("addVideoChunkRaw's first argument (data) must be an instance of Uint8Array.");
1667
+ }
1668
+ if (type !== "key" && type !== "delta") {
1669
+ throw new TypeError("addVideoChunkRaw's second argument (type) must be either 'key' or 'delta'.");
1670
+ }
1671
+ if (!Number.isFinite(timestamp) || timestamp < 0) {
1672
+ throw new TypeError("addVideoChunkRaw's third argument (timestamp) must be a non-negative real number.");
1673
+ }
1674
+ if (!Number.isFinite(duration) || duration < 0) {
1675
+ throw new TypeError("addVideoChunkRaw's fourth argument (duration) must be a non-negative real number.");
1676
+ }
1677
+ if (meta && typeof meta !== "object") {
1678
+ throw new TypeError("addVideoChunkRaw's fifth argument (meta), when provided, must be an object.");
1679
+ }
1680
+ if (compositionTimeOffset !== void 0 && !Number.isFinite(compositionTimeOffset)) {
1681
+ throw new TypeError(
1682
+ "addVideoChunkRaw's sixth argument (compositionTimeOffset), when provided, must be a real number."
1683
+ );
1684
+ }
1685
+ __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
1686
+ if (!__privateGet(this, _options).video)
1687
+ throw new Error("No video track declared.");
1688
+ if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _videoTrack).samples.length === __privateGet(this, _options).fastStart.expectedVideoChunks) {
1689
+ throw new Error(`Cannot add more video chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedVideoChunks}).`);
1690
+ }
1691
+ let videoSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _videoTrack), data, type, timestamp, duration, meta, compositionTimeOffset);
1692
+ if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _audioTrack)) {
1693
+ while (__privateGet(this, _audioSampleQueue).length > 0 && __privateGet(this, _audioSampleQueue)[0].decodeTimestamp <= videoSample.decodeTimestamp) {
1694
+ let audioSample = __privateGet(this, _audioSampleQueue).shift();
1695
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
1696
+ }
1697
+ if (videoSample.decodeTimestamp <= __privateGet(this, _audioTrack).lastDecodeTimestamp) {
1698
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
1699
+ } else {
1700
+ __privateGet(this, _videoSampleQueue).push(videoSample);
1701
+ }
1702
+ } else {
1703
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
1704
+ }
1705
+ }
1706
+ addAudioChunk(sample, meta, timestamp) {
1707
+ if (!(sample instanceof EncodedAudioChunk)) {
1708
+ throw new TypeError("addAudioChunk's first argument (sample) must be of type EncodedAudioChunk.");
1709
+ }
1710
+ if (meta && typeof meta !== "object") {
1711
+ throw new TypeError("addAudioChunk's second argument (meta), when provided, must be an object.");
1712
+ }
1713
+ if (timestamp !== void 0 && (!Number.isFinite(timestamp) || timestamp < 0)) {
1714
+ throw new TypeError(
1715
+ "addAudioChunk's third argument (timestamp), when provided, must be a non-negative real number."
1716
+ );
1717
+ }
1718
+ let data = new Uint8Array(sample.byteLength);
1719
+ sample.copyTo(data);
1720
+ this.addAudioChunkRaw(data, sample.type, timestamp ?? sample.timestamp, sample.duration, meta);
1721
+ }
1722
+ addAudioChunkRaw(data, type, timestamp, duration, meta) {
1723
+ if (!(data instanceof Uint8Array)) {
1724
+ throw new TypeError("addAudioChunkRaw's first argument (data) must be an instance of Uint8Array.");
1725
+ }
1726
+ if (type !== "key" && type !== "delta") {
1727
+ throw new TypeError("addAudioChunkRaw's second argument (type) must be either 'key' or 'delta'.");
1728
+ }
1729
+ if (!Number.isFinite(timestamp) || timestamp < 0) {
1730
+ throw new TypeError("addAudioChunkRaw's third argument (timestamp) must be a non-negative real number.");
1731
+ }
1732
+ if (!Number.isFinite(duration) || duration < 0) {
1733
+ throw new TypeError("addAudioChunkRaw's fourth argument (duration) must be a non-negative real number.");
1734
+ }
1735
+ if (meta && typeof meta !== "object") {
1736
+ throw new TypeError("addAudioChunkRaw's fifth argument (meta), when provided, must be an object.");
1737
+ }
1738
+ __privateMethod(this, _ensureNotFinalized, ensureNotFinalized_fn).call(this);
1739
+ if (!__privateGet(this, _options).audio)
1740
+ throw new Error("No audio track declared.");
1741
+ if (typeof __privateGet(this, _options).fastStart === "object" && __privateGet(this, _audioTrack).samples.length === __privateGet(this, _options).fastStart.expectedAudioChunks) {
1742
+ throw new Error(`Cannot add more audio chunks than specified in 'fastStart' (${__privateGet(this, _options).fastStart.expectedAudioChunks}).`);
1743
+ }
1744
+ let audioSample = __privateMethod(this, _createSampleForTrack, createSampleForTrack_fn).call(this, __privateGet(this, _audioTrack), data, type, timestamp, duration, meta);
1745
+ if (__privateGet(this, _options).fastStart === "fragmented" && __privateGet(this, _videoTrack)) {
1746
+ while (__privateGet(this, _videoSampleQueue).length > 0 && __privateGet(this, _videoSampleQueue)[0].decodeTimestamp <= audioSample.decodeTimestamp) {
1747
+ let videoSample = __privateGet(this, _videoSampleQueue).shift();
1748
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
1749
+ }
1750
+ if (audioSample.decodeTimestamp <= __privateGet(this, _videoTrack).lastDecodeTimestamp) {
1751
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
1752
+ } else {
1753
+ __privateGet(this, _audioSampleQueue).push(audioSample);
1754
+ }
1755
+ } else {
1756
+ __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
1757
+ }
1758
+ }
1759
/** Finalizes the file, making it ready for use. Must be called after all video and audio chunks have been added. */
finalize() {
  if (__privateGet(this, _finalized)) {
    throw new Error("Cannot finalize a muxer more than once.");
  }
  if (__privateGet(this, _options).fastStart === "fragmented") {
    // Drain the interleaving queues into their tracks, then close the last
    // (still-open) fragment without flushing — flushing happens below.
    for (let videoSample of __privateGet(this, _videoSampleQueue))
      __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _videoTrack), videoSample);
    for (let audioSample of __privateGet(this, _audioSampleQueue))
      __privateMethod(this, _addSampleToTrack, addSampleToTrack_fn).call(this, __privateGet(this, _audioTrack), audioSample);
    __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this, false);
  } else {
    // Non-fragmented: close each track's open chunk.
    if (__privateGet(this, _videoTrack))
      __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _videoTrack));
    if (__privateGet(this, _audioTrack))
      __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, __privateGet(this, _audioTrack));
  }
  let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter(Boolean);
  if (__privateGet(this, _options).fastStart === "in-memory") {
    let mdatSize;
    // Chunk offsets depend on the moov size, and the mdat header size (32- vs
    // 64-bit) depends on the total size, which shifts the offsets again — so
    // iterate up to twice to let the layout converge.
    for (let i2 = 0; i2 < 2; i2++) {
      let movieBox2 = moov(tracks, __privateGet(this, _creationTime));
      let movieBoxSize = __privateGet(this, _writer).measureBox(movieBox2);
      mdatSize = __privateGet(this, _writer).measureBox(__privateGet(this, _mdat));
      let currentChunkPos = __privateGet(this, _writer).pos + movieBoxSize + mdatSize;
      for (let chunk of __privateGet(this, _finalizedChunks)) {
        chunk.offset = currentChunkPos;
        for (let { data } of chunk.samples) {
          currentChunkPos += data.byteLength;
          mdatSize += data.byteLength;
        }
      }
      if (currentChunkPos < 2 ** 32)
        break;
      // File crosses 4 GiB: switch the mdat to a 64-bit size and redo the pass.
      if (mdatSize >= 2 ** 32)
        __privateGet(this, _mdat).largeSize = true;
    }
    let movieBox = moov(tracks, __privateGet(this, _creationTime));
    __privateGet(this, _writer).writeBox(movieBox);
    __privateGet(this, _mdat).size = mdatSize;
    __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
    // All sample bytes were kept in memory until now; write them and drop
    // the references so the buffers can be garbage-collected.
    for (let chunk of __privateGet(this, _finalizedChunks)) {
      for (let sample of chunk.samples) {
        __privateGet(this, _writer).write(sample.data);
        sample.data = null;
      }
    }
  } else if (__privateGet(this, _options).fastStart === "fragmented") {
    // Append the mfra box, then patch its trailing 4-byte size field so
    // readers can locate the box from the end of the file.
    let startPos = __privateGet(this, _writer).pos;
    let mfraBox = mfra(tracks);
    __privateGet(this, _writer).writeBox(mfraBox);
    let mfraBoxSize = __privateGet(this, _writer).pos - startPos;
    __privateGet(this, _writer).seek(__privateGet(this, _writer).pos - 4);
    __privateGet(this, _writer).writeU32(mfraBoxSize);
  } else {
    // Plain (fastStart: false) or reserved-space (fastStart: object) layout:
    // patch the already-written mdat's size, then write the moov.
    let mdatPos = __privateGet(this, _writer).offsets.get(__privateGet(this, _mdat));
    let mdatSize = __privateGet(this, _writer).pos - mdatPos;
    __privateGet(this, _mdat).size = mdatSize;
    __privateGet(this, _mdat).largeSize = mdatSize >= 2 ** 32;
    __privateGet(this, _writer).patchBox(__privateGet(this, _mdat));
    let movieBox = moov(tracks, __privateGet(this, _creationTime));
    if (typeof __privateGet(this, _options).fastStart === "object") {
      // Space was reserved after ftyp (see writeHeader); place the moov there
      // and pad the remainder with a free box up to the mdat.
      __privateGet(this, _writer).seek(__privateGet(this, _ftypSize));
      __privateGet(this, _writer).writeBox(movieBox);
      let remainingBytes = mdatPos - __privateGet(this, _writer).pos;
      __privateGet(this, _writer).writeBox(free(remainingBytes));
    } else {
      __privateGet(this, _writer).writeBox(movieBox);
    }
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
  __privateGet(this, _writer).finalize();
  __privateSet(this, _finalized, true);
}
1833
+ };
1834
// Per-instance private state. The bundler lowered class #fields to WeakMaps
// keyed by the instance (accessed via __privateGet/__privateSet).
_options = /* @__PURE__ */ new WeakMap(); // validated constructor options
_writer = /* @__PURE__ */ new WeakMap(); // box/byte writer for the output target
_ftypSize = /* @__PURE__ */ new WeakMap(); // writer position right after the ftyp box
_mdat = /* @__PURE__ */ new WeakMap(); // the (single) mdat box, when one is used
_videoTrack = /* @__PURE__ */ new WeakMap(); // video track state, or undefined/null
_audioTrack = /* @__PURE__ */ new WeakMap(); // audio track state, or undefined/null
_creationTime = /* @__PURE__ */ new WeakMap(); // timestamp passed into moov()
_finalizedChunks = /* @__PURE__ */ new WeakMap(); // all closed chunks, across tracks
_nextFragmentNumber = /* @__PURE__ */ new WeakMap(); // moof sequence counter (fragmented mode)
_videoSampleQueue = /* @__PURE__ */ new WeakMap(); // video samples held back for interleaving
_audioSampleQueue = /* @__PURE__ */ new WeakMap(); // audio samples held back for interleaving
_finalized = /* @__PURE__ */ new WeakMap(); // true once finalize() has run
1846
_validateOptions = /* @__PURE__ */ new WeakSet();
/**
 * Validates the user-supplied muxer options.
 * Throws a TypeError on the first invalid field; returns nothing on success.
 * Checks run in a fixed order: target, video, audio, firstTimestampBehavior,
 * fastStart, minFragmentDuration.
 */
validateOptions_fn = function(options) {
  if (typeof options !== "object") {
    throw new TypeError("The muxer requires an options object to be passed to its constructor.");
  }
  if (!(options.target instanceof Target)) {
    throw new TypeError("The target must be provided and an instance of Target.");
  }
  const video = options.video;
  if (video) {
    if (!SUPPORTED_VIDEO_CODECS.includes(video.codec)) {
      throw new TypeError(`Unsupported video codec: ${video.codec}`);
    }
    if (!Number.isInteger(video.width) || video.width <= 0) {
      throw new TypeError(`Invalid video width: ${video.width}. Must be a positive integer.`);
    }
    if (!Number.isInteger(video.height) || video.height <= 0) {
      throw new TypeError(`Invalid video height: ${video.height}. Must be a positive integer.`);
    }
    // rotation is either one of four fixed angles or a full 3x3 matrix.
    const rotation = video.rotation;
    const isBadAngle = typeof rotation === "number" && ![0, 90, 180, 270].includes(rotation);
    if (isBadAngle) {
      throw new TypeError(`Invalid video rotation: ${rotation}. Has to be 0, 90, 180 or 270.`);
    } else if (Array.isArray(rotation) && (rotation.length !== 9 || rotation.some((value) => typeof value !== "number"))) {
      throw new TypeError(`Invalid video transformation matrix: ${rotation.join()}`);
    }
    if (video.frameRate !== void 0 && (!Number.isInteger(video.frameRate) || video.frameRate <= 0)) {
      throw new TypeError(`Invalid video frame rate: ${video.frameRate}. Must be a positive integer.`);
    }
  }
  const audio = options.audio;
  if (audio) {
    if (!SUPPORTED_AUDIO_CODECS.includes(audio.codec)) {
      throw new TypeError(`Unsupported audio codec: ${audio.codec}`);
    }
    if (!Number.isInteger(audio.numberOfChannels) || audio.numberOfChannels <= 0) {
      throw new TypeError(`Invalid number of audio channels: ${audio.numberOfChannels}. Must be a positive integer.`);
    }
    if (!Number.isInteger(audio.sampleRate) || audio.sampleRate <= 0) {
      throw new TypeError(`Invalid audio sample rate: ${audio.sampleRate}. Must be a positive integer.`);
    }
  }
  if (options.firstTimestampBehavior && !FIRST_TIMESTAMP_BEHAVIORS.includes(options.firstTimestampBehavior)) {
    throw new TypeError(`Invalid first timestamp behavior: ${options.firstTimestampBehavior}`);
  }
  if (typeof options.fastStart === "object") {
    // Object form requires per-track expected chunk counts so reserved space
    // can be computed up front.
    if (options.video) {
      const expected = options.fastStart.expectedVideoChunks;
      if (expected === void 0) {
        throw new TypeError(`'fastStart' is an object but is missing property 'expectedVideoChunks'.`);
      } else if (!Number.isInteger(expected) || expected < 0) {
        throw new TypeError(`'expectedVideoChunks' must be a non-negative integer.`);
      }
    }
    if (options.audio) {
      const expected = options.fastStart.expectedAudioChunks;
      if (expected === void 0) {
        throw new TypeError(`'fastStart' is an object but is missing property 'expectedAudioChunks'.`);
      } else if (!Number.isInteger(expected) || expected < 0) {
        throw new TypeError(`'expectedAudioChunks' must be a non-negative integer.`);
      }
    }
  } else if (![false, "in-memory", "fragmented"].includes(options.fastStart)) {
    throw new TypeError(`'fastStart' option must be false, 'in-memory', 'fragmented' or an object.`);
  }
  if (options.minFragmentDuration !== void 0 && (!Number.isFinite(options.minFragmentDuration) || options.minFragmentDuration < 0)) {
    throw new TypeError(`'minFragmentDuration' must be a non-negative number.`);
  }
};
1916
_writeHeader = /* @__PURE__ */ new WeakSet();
// Writes the ftyp box and sets up the mdat according to the fastStart mode.
writeHeader_fn = function() {
  __privateGet(this, _writer).writeBox(ftyp({
    holdsAvc: __privateGet(this, _options).video?.codec === "avc",
    fragmented: __privateGet(this, _options).fastStart === "fragmented"
  }));
  // Record the position right after ftyp; finalize() seeks back here when a
  // moov-reservation (object-form fastStart) layout is used.
  __privateSet(this, _ftypSize, __privateGet(this, _writer).pos);
  if (__privateGet(this, _options).fastStart === "in-memory") {
    // mdat is kept in memory and only written during finalize().
    __privateSet(this, _mdat, mdat(false));
  } else if (__privateGet(this, _options).fastStart === "fragmented") ;
  // ^ intentional empty branch: fragmented files have no single up-front mdat.
  else {
    if (typeof __privateGet(this, _options).fastStart === "object") {
      // Reserve space for the moov box now so it can be placed before the
      // media data in finalize().
      let moovSizeUpperBound = __privateMethod(this, _computeMoovSizeUpperBound, computeMoovSizeUpperBound_fn).call(this);
      __privateGet(this, _writer).seek(__privateGet(this, _writer).pos + moovSizeUpperBound);
    }
    // Large-size (64-bit) mdat header; its final size is patched in finalize().
    __privateSet(this, _mdat, mdat(true));
    __privateGet(this, _writer).writeBox(__privateGet(this, _mdat));
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
1936
_computeMoovSizeUpperBound = /* @__PURE__ */ new WeakSet();
/**
 * Conservatively estimates the maximum number of bytes the moov box can
 * occupy, from the expected chunk counts in the object-form fastStart option.
 * Returns undefined when fastStart is not the object form.
 */
computeMoovSizeUpperBound_fn = function() {
  const fastStart = __privateGet(this, _options).fastStart;
  if (typeof fastStart !== "object")
    return;
  // Worst-case per-track bytes for the sample tables; the constants mirror
  // per-entry field sizes (presumably stts/ctts/stsz/stsc/co64 — confirm
  // against the box-writing code).
  const bytesForTrack = (n) => {
    const worstCaseEntries = Math.ceil(2 / 3 * n);
    return (4 + 4) * worstCaseEntries + 4 * n + (4 + 4 + 4) * worstCaseEntries + 4 * n + 8 * n;
  };
  const chunkCounts = [fastStart.expectedVideoChunks, fastStart.expectedAudioChunks];
  const total = chunkCounts.reduce(
    (sum, n) => n ? sum + bytesForTrack(n) : sum,
    0
  );
  // Fixed headroom for headers and fixed-size boxes.
  return total + 4096;
};
1957
_prepareTracks = /* @__PURE__ */ new WeakSet();
// Builds the internal per-track state objects from the validated options.
// Video (when present) gets track id 1; audio gets 2 after video, else 1.
prepareTracks_fn = function() {
  if (__privateGet(this, _options).video) {
    __privateSet(this, _videoTrack, {
      id: 1,
      info: {
        type: "video",
        codec: __privateGet(this, _options).video.codec,
        width: __privateGet(this, _options).video.width,
        height: __privateGet(this, _options).video.height,
        rotation: __privateGet(this, _options).video.rotation ?? 0,
        decoderConfig: null
      },
      // The fallback contains many common frame rates as factors
      timescale: __privateGet(this, _options).video.frameRate ?? 57600,
      samples: [],
      finalizedChunks: [],
      currentChunk: null,
      firstDecodeTimestamp: void 0,
      lastDecodeTimestamp: -1, // -1 = no sample received yet (see validateTimestamp)
      timeToSampleTable: [],
      compositionTimeOffsetTable: [],
      lastTimescaleUnits: null,
      lastSample: null,
      compactlyCodedChunkTable: []
    });
  }
  if (__privateGet(this, _options).audio) {
    __privateSet(this, _audioTrack, {
      id: __privateGet(this, _options).video ? 2 : 1,
      info: {
        type: "audio",
        codec: __privateGet(this, _options).audio.codec,
        numberOfChannels: __privateGet(this, _options).audio.numberOfChannels,
        sampleRate: __privateGet(this, _options).audio.sampleRate,
        decoderConfig: null
      },
      // Audio uses the sample rate itself as the timescale.
      timescale: __privateGet(this, _options).audio.sampleRate,
      samples: [],
      finalizedChunks: [],
      currentChunk: null,
      firstDecodeTimestamp: void 0,
      lastDecodeTimestamp: -1,
      timeToSampleTable: [],
      compositionTimeOffsetTable: [],
      lastTimescaleUnits: null,
      lastSample: null,
      compactlyCodedChunkTable: []
    });
    if (__privateGet(this, _options).audio.codec === "aac") {
      // Pre-seed a plausible AAC decoder config so muxing works even if the
      // encoder never provides one; overwritten/merged when metadata arrives
      // (see createSampleForTrack).
      let guessedCodecPrivate = __privateMethod(this, _generateMpeg4AudioSpecificConfig, generateMpeg4AudioSpecificConfig_fn).call(
        this,
        2,
        // Object type for AAC-LC, since it's the most common
        __privateGet(this, _options).audio.sampleRate,
        __privateGet(this, _options).audio.numberOfChannels
      );
      __privateGet(this, _audioTrack).info.decoderConfig = {
        codec: __privateGet(this, _options).audio.codec,
        description: guessedCodecPrivate,
        numberOfChannels: __privateGet(this, _options).audio.numberOfChannels,
        sampleRate: __privateGet(this, _options).audio.sampleRate
      };
    }
  }
};
2023
_generateMpeg4AudioSpecificConfig = /* @__PURE__ */ new WeakSet();
/**
 * Builds an MPEG-4 AudioSpecificConfig (ISO/IEC 14496-3) byte sequence:
 * 5 bits object type, 4 bits sampling-frequency index (15 = escape, followed
 * by the 24-bit exact rate), 4 bits channel configuration, zero-padded to a
 * whole number of bytes.
 *
 * @param objectType audio object type (e.g. 2 for AAC-LC)
 * @param sampleRate sampling rate in Hz
 * @param numberOfChannels channel count, used directly as channelConfiguration
 * @returns Uint8Array with the packed config
 */
generateMpeg4AudioSpecificConfig_fn = function(objectType, sampleRate, numberOfChannels) {
  let frequencyIndices = [96e3, 88200, 64e3, 48e3, 44100, 32e3, 24e3, 22050, 16e3, 12e3, 11025, 8e3, 7350];
  let frequencyIndex = frequencyIndices.indexOf(sampleRate);
  if (frequencyIndex === -1) {
    // Fix: a rate outside the standard table used to yield index -1, whose
    // binary form ("-1") leaked a "-" into the bit string and corrupted the
    // config. Use the spec's escape value 15 so the exact rate is written
    // explicitly below (that branch was previously unreachable: the table
    // only has indices 0-12).
    frequencyIndex = 15;
  }
  let channelConfig = numberOfChannels;
  let configBits = "";
  configBits += objectType.toString(2).padStart(5, "0");
  configBits += frequencyIndex.toString(2).padStart(4, "0");
  if (frequencyIndex === 15)
    configBits += sampleRate.toString(2).padStart(24, "0");
  configBits += channelConfig.toString(2).padStart(4, "0");
  // Zero-pad to a byte boundary, then pack 8 bits per output byte.
  let paddingLength = Math.ceil(configBits.length / 8) * 8;
  configBits = configBits.padEnd(paddingLength, "0");
  let configBytes = new Uint8Array(configBits.length / 8);
  for (let i2 = 0; i2 < configBits.length; i2 += 8) {
    configBytes[i2 / 8] = parseInt(configBits.slice(i2, i2 + 8), 2);
  }
  return configBytes;
};
2042
_createSampleForTrack = /* @__PURE__ */ new WeakSet();
/**
 * Converts a raw encoded chunk (microsecond timestamps) into the muxer's
 * internal sample representation (second-based timestamps), merging any
 * decoder config from the chunk metadata into the track info.
 */
createSampleForTrack_fn = function(track, data, type, timestamp, duration, meta, compositionTimeOffset) {
  const MICROS_PER_SECOND = 1e6;
  // DTS = PTS minus the composition-time offset (0 when none given).
  const decodeMicros = timestamp - (compositionTimeOffset ?? 0);
  const adjusted = __privateMethod(this, _validateTimestamp, validateTimestamp_fn).call(
    this,
    timestamp / MICROS_PER_SECOND,
    decodeMicros / MICROS_PER_SECOND,
    track
  );
  const durationInSeconds = duration / MICROS_PER_SECOND;
  if (meta?.decoderConfig) {
    if (track.info.decoderConfig === null) {
      track.info.decoderConfig = meta.decoderConfig;
    } else {
      // Merge rather than replace, keeping previously-learned fields.
      Object.assign(track.info.decoderConfig, meta.decoderConfig);
    }
  }
  return {
    presentationTimestamp: adjusted.presentationTimestamp,
    decodeTimestamp: adjusted.decodeTimestamp,
    duration: durationInSeconds,
    data,
    size: data.byteLength,
    type,
    // Placeholder; refined once the next sample comes in.
    timescaleUnitsToNextSample: intoTimescale(durationInSeconds, track.timescale)
  };
};
2069
_addSampleToTrack = /* @__PURE__ */ new WeakSet();
// Registers a sample with its track: refines the previous sample's delta,
// maintains the run-length-coded time/composition-offset tables, and starts
// a new chunk (or cuts a fragment) when the current one has run long enough.
addSampleToTrack_fn = function(track, sample) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    track.samples.push(sample);
  }
  const sampleCompositionTimeOffset = intoTimescale(sample.presentationTimestamp - sample.decodeTimestamp, track.timescale);
  if (track.lastTimescaleUnits !== null) {
    // Not the first sample: the true delta of the PREVIOUS sample is only
    // known now, from this sample's decode timestamp.
    let timescaleUnits = intoTimescale(sample.decodeTimestamp, track.timescale, false);
    let delta = Math.round(timescaleUnits - track.lastTimescaleUnits);
    // Accumulate the rounded delta (not the raw value) to avoid drift.
    track.lastTimescaleUnits += delta;
    track.lastSample.timescaleUnitsToNextSample = delta;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      let lastTableEntry = last(track.timeToSampleTable);
      if (lastTableEntry.sampleCount === 1) {
        // Backfill the first sample's provisional delta and absorb this one.
        lastTableEntry.sampleDelta = delta;
        lastTableEntry.sampleCount++;
      } else if (lastTableEntry.sampleDelta === delta) {
        lastTableEntry.sampleCount++;
      } else {
        // Delta changed: the previous entry counted its last sample with the
        // old delta, so shrink it by one and open a new entry that covers
        // that sample plus this one.
        lastTableEntry.sampleCount--;
        track.timeToSampleTable.push({
          sampleCount: 2,
          sampleDelta: delta
        });
      }
      // Run-length-code equal composition offsets.
      const lastCompositionTimeOffsetTableEntry = last(track.compositionTimeOffsetTable);
      if (lastCompositionTimeOffsetTableEntry.sampleCompositionTimeOffset === sampleCompositionTimeOffset) {
        lastCompositionTimeOffsetTableEntry.sampleCount++;
      } else {
        track.compositionTimeOffsetTable.push({
          sampleCount: 1,
          sampleCompositionTimeOffset
        });
      }
    }
  } else {
    // Very first sample of this track.
    track.lastTimescaleUnits = 0;
    if (__privateGet(this, _options).fastStart !== "fragmented") {
      track.timeToSampleTable.push({
        sampleCount: 1,
        // Provisional delta from the sample's own duration; corrected above
        // once the next sample arrives.
        sampleDelta: intoTimescale(sample.duration, track.timescale)
      });
      track.compositionTimeOffsetTable.push({
        sampleCount: 1,
        sampleCompositionTimeOffset
      });
    }
  }
  track.lastSample = sample;
  let beginNewChunk = false;
  if (!track.currentChunk) {
    beginNewChunk = true;
  } else {
    let currentChunkDuration = sample.presentationTimestamp - track.currentChunk.startTimestamp;
    if (__privateGet(this, _options).fastStart === "fragmented") {
      let mostImportantTrack = __privateGet(this, _videoTrack) ?? __privateGet(this, _audioTrack);
      const chunkDuration = __privateGet(this, _options).minFragmentDuration ?? 1;
      // Fragments are only cut at a key frame of the leading track, and only
      // once the minimum fragment duration has elapsed.
      if (track === mostImportantTrack && sample.type === "key" && currentChunkDuration >= chunkDuration) {
        beginNewChunk = true;
        __privateMethod(this, _finalizeFragment, finalizeFragment_fn).call(this);
      }
    } else {
      // Non-fragmented chunks are capped at 0.5 seconds.
      beginNewChunk = currentChunkDuration >= 0.5;
    }
  }
  if (beginNewChunk) {
    if (track.currentChunk) {
      __privateMethod(this, _finalizeCurrentChunk, finalizeCurrentChunk_fn).call(this, track);
    }
    track.currentChunk = {
      startTimestamp: sample.presentationTimestamp,
      samples: []
    };
  }
  track.currentChunk.samples.push(sample);
};
2145
_validateTimestamp = /* @__PURE__ */ new WeakSet();
/**
 * Validates a sample's timestamps against the configured
 * firstTimestampBehavior and the track's monotonicity requirement.
 *
 * @param presentationTimestamp PTS in seconds
 * @param decodeTimestamp DTS in seconds
 * @param track the track the sample belongs to
 * @returns the (possibly offset-adjusted) { presentationTimestamp, decodeTimestamp }
 * @throws Error in 'strict' mode when the first DTS isn't 0, or whenever DTS decreases
 */
validateTimestamp_fn = function(presentationTimestamp, decodeTimestamp, track) {
  const strictTimestampBehavior = __privateGet(this, _options).firstTimestampBehavior === "strict";
  const noLastDecodeTimestamp = track.lastDecodeTimestamp === -1;
  const timestampNonZero = decodeTimestamp !== 0;
  if (strictTimestampBehavior && noLastDecodeTimestamp && timestampNonZero) {
    // Message fixed: it was missing spaces after ")." and in "the document",
    // and read "which is probably what you want" — the inverse of what this
    // error is explaining.
    throw new Error(
      `The first chunk for your media track must have a timestamp of 0 (received DTS=${decodeTimestamp}). Non-zero first timestamps are often caused by directly piping frames or audio data from a MediaStreamTrack into the encoder. Their timestamps are typically relative to the age of the document, which is probably not what you want.

If you want to offset all timestamps of a track such that the first one is zero, set firstTimestampBehavior: 'offset' in the options.
`
    );
  } else if (__privateGet(this, _options).firstTimestampBehavior === "offset" || __privateGet(this, _options).firstTimestampBehavior === "cross-track-offset") {
    if (track.firstDecodeTimestamp === void 0) {
      track.firstDecodeTimestamp = decodeTimestamp;
    }
    let baseDecodeTimestamp;
    if (__privateGet(this, _options).firstTimestampBehavior === "offset") {
      // Offset each track independently by its own first DTS.
      baseDecodeTimestamp = track.firstDecodeTimestamp;
    } else {
      // 'cross-track-offset': offset all tracks by the earliest first DTS
      // seen on any track, preserving their relative alignment.
      baseDecodeTimestamp = Math.min(
        __privateGet(this, _videoTrack)?.firstDecodeTimestamp ?? Infinity,
        __privateGet(this, _audioTrack)?.firstDecodeTimestamp ?? Infinity
      );
    }
    decodeTimestamp -= baseDecodeTimestamp;
    presentationTimestamp -= baseDecodeTimestamp;
  }
  if (decodeTimestamp < track.lastDecodeTimestamp) {
    throw new Error(
      `Timestamps must be monotonically increasing (DTS went from ${track.lastDecodeTimestamp * 1e6} to ${decodeTimestamp * 1e6}).`
    );
  }
  track.lastDecodeTimestamp = decodeTimestamp;
  return { presentationTimestamp, decodeTimestamp };
};
2181
_finalizeCurrentChunk = /* @__PURE__ */ new WeakSet();
// Closes a track's current chunk: records it in the chunk tables and, unless
// building in memory, writes its sample bytes to the output immediately.
// Only valid in non-fragmented modes.
finalizeCurrentChunk_fn = function(track) {
  if (__privateGet(this, _options).fastStart === "fragmented") {
    throw new Error("Can't finalize individual chunks if 'fastStart' is set to 'fragmented'.");
  }
  if (!track.currentChunk)
    return;
  track.finalizedChunks.push(track.currentChunk);
  __privateGet(this, _finalizedChunks).push(track.currentChunk);
  // Run-length-coded table: only append a row when samples-per-chunk changes
  // (presumably backs the stsc box — confirm against the box writer).
  if (track.compactlyCodedChunkTable.length === 0 || last(track.compactlyCodedChunkTable).samplesPerChunk !== track.currentChunk.samples.length) {
    track.compactlyCodedChunkTable.push({
      firstChunk: track.finalizedChunks.length,
      // 1-indexed
      samplesPerChunk: track.currentChunk.samples.length
    });
  }
  if (__privateGet(this, _options).fastStart === "in-memory") {
    // Real offsets are computed later, in finalize(), once the moov size is
    // known; 0 is a placeholder.
    track.currentChunk.offset = 0;
    return;
  }
  track.currentChunk.offset = __privateGet(this, _writer).pos;
  for (let sample of track.currentChunk.samples) {
    __privateGet(this, _writer).write(sample.data);
    // Release the sample's bytes once written.
    sample.data = null;
  }
  __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
};
2208
_finalizeFragment = /* @__PURE__ */ new WeakSet();
// Writes one moof+mdat pair covering every track that currently holds a
// chunk. The moof is written twice: once to reserve its space, then again
// in place after the actual sample data offsets are known.
finalizeFragment_fn = function(flushStreamingWriter = true) {
  if (__privateGet(this, _options).fastStart !== "fragmented") {
    throw new Error("Can't finalize a fragment unless 'fastStart' is set to 'fragmented'.");
  }
  let tracks = [__privateGet(this, _videoTrack), __privateGet(this, _audioTrack)].filter((track) => track && track.currentChunk);
  if (tracks.length === 0)
    return;
  let fragmentNumber = __privateWrapper(this, _nextFragmentNumber)._++;
  if (fragmentNumber === 1) {
    // The moov box is emitted just before the very first fragment.
    let movieBox = moov(tracks, __privateGet(this, _creationTime), true);
    __privateGet(this, _writer).writeBox(movieBox);
  }
  let moofOffset = __privateGet(this, _writer).pos;
  let moofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(moofBox);
  {
    let mdatBox = mdat(false);
    let totalTrackSampleSize = 0;
    for (let track of tracks) {
      for (let sample of track.currentChunk.samples) {
        totalTrackSampleSize += sample.size;
      }
    }
    let mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    if (mdatSize >= 2 ** 32) {
      // A 64-bit size enlarges the header itself, so re-measure.
      mdatBox.largeSize = true;
      mdatSize = __privateGet(this, _writer).measureBox(mdatBox) + totalTrackSampleSize;
    }
    mdatBox.size = mdatSize;
    __privateGet(this, _writer).writeBox(mdatBox);
  }
  for (let track of tracks) {
    track.currentChunk.offset = __privateGet(this, _writer).pos;
    track.currentChunk.moofOffset = moofOffset;
    for (let sample of track.currentChunk.samples) {
      __privateGet(this, _writer).write(sample.data);
      // Release sample bytes once written.
      sample.data = null;
    }
  }
  // Rewrite the moof in place now that chunk offsets are final, then return
  // to the end of the file.
  let endPos = __privateGet(this, _writer).pos;
  __privateGet(this, _writer).seek(__privateGet(this, _writer).offsets.get(moofBox));
  let newMoofBox = moof(fragmentNumber, tracks);
  __privateGet(this, _writer).writeBox(newMoofBox);
  __privateGet(this, _writer).seek(endPos);
  for (let track of tracks) {
    track.finalizedChunks.push(track.currentChunk);
    __privateGet(this, _finalizedChunks).push(track.currentChunk);
    track.currentChunk = null;
  }
  if (flushStreamingWriter) {
    __privateMethod(this, _maybeFlushStreamingTargetWriter, maybeFlushStreamingTargetWriter_fn).call(this);
  }
};
2262
_maybeFlushStreamingTargetWriter = /* @__PURE__ */ new WeakSet();
// Flushes the writer, but only when the target is a streaming one.
maybeFlushStreamingTargetWriter_fn = function() {
  const writer = __privateGet(this, _writer);
  if (writer instanceof StreamTargetWriter) {
    writer.flush();
  }
};
2268
_ensureNotFinalized = /* @__PURE__ */ new WeakSet();
// Guard used by the chunk-adding methods: rejects use after finalize().
ensureNotFinalized_fn = function() {
  if (!__privateGet(this, _finalized)) {
    return;
  }
  throw new Error("Cannot add new video or audio chunks after the file has been finalized.");
};
2274
// Base render resolution; multiplied by the `scale` option in createVideoRenderer.
const CANVAS_WIDTH = 1280;
const CANVAS_HEIGHT = 720;
2276
/**
 * Reports whether the WebCodecs API surface this renderer needs
 * (VideoEncoder and VideoFrame globals) exists in the current environment.
 * @returns {boolean}
 */
function isWebCodecsSupported() {
  const hasVideoEncoder = typeof VideoEncoder !== "undefined";
  const hasVideoFrame = typeof VideoFrame !== "undefined";
  return hasVideoEncoder && hasVideoFrame;
}
2279
// Creates a cancellable renderer that draws each timeline frame to a canvas,
// encodes it with WebCodecs (H.264), and muxes the result into an MP4 Blob.
// `onProgress(fraction, frame, totalFrames)` is invoked after every frame.
function createVideoRenderer(options) {
  const { scene, timeline: timeline2, fps, bitrate, scale, onProgress } = options;
  let cancelled = false;
  let encoder = null;
  let muxer = null;
  let encoderError = null; // set asynchronously by the encoder's error callback
  return {
    async start() {
      if (!isWebCodecsSupported()) {
        throw new Error("WebCodecs is not supported in this browser. Please use Chrome, Edge, or Opera.");
      }
      // timeline duration is in milliseconds; WebCodecs timestamps in microseconds.
      const duration = timeline2.duration;
      const totalFrames = Math.ceil(duration / 1e3 * fps);
      // NOTE(review): may be fractional for fps values not dividing 1e6
      // (e.g. 30) — VideoFrame timestamps get non-integer values; confirm
      // this is acceptable downstream.
      const frameDurationUs = 1e6 / fps;
      const videoWidth = CANVAS_WIDTH * scale;
      const videoHeight = CANVAS_HEIGHT * scale;
      const canvas = document.createElement("canvas");
      canvas.width = videoWidth;
      canvas.height = videoHeight;
      const ctx = canvas.getContext("2d");
      const animCtx = new AnimCtxImpl();
      muxer = new Muxer({
        target: new ArrayBufferTarget(),
        video: {
          codec: "avc",
          width: videoWidth,
          height: videoHeight
        },
        fastStart: "in-memory"
      });
      encoder = new VideoEncoder({
        output: (chunk, meta) => {
          // Encoder callbacks can fire after cancel(); drop late chunks.
          if (cancelled || !muxer) return;
          muxer.addVideoChunk(chunk, meta);
        },
        error: (e) => {
          // Stored and rethrown from the render loop below.
          console.error("VideoEncoder error:", e);
          encoderError = e;
        }
      });
      encoder.configure({
        codec: "avc1.640032",
        // H.264 High Profile Level 5.1 (supports up to 4K)
        width: videoWidth,
        height: videoHeight,
        bitrate,
        framerate: fps
      });
      for (let frame = 0; frame < totalFrames && !cancelled && !encoderError; frame++) {
        const timeMs = frame / fps * 1e3;
        // Evaluate the timeline at this instant and render into the canvas.
        animCtx.reset(scene);
        timeline2.evaluate(timeMs, animCtx);
        const solved = animCtx.solve(scene);
        ctx.fillStyle = "#ffffff";
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        scene.render(solved, canvas, scale);
        const videoFrame = new VideoFrame(canvas, {
          timestamp: frame * frameDurationUs
        });
        // Force one key frame per second of video.
        const keyFrame = frame % fps === 0;
        encoder.encode(videoFrame, { keyFrame });
        videoFrame.close();
        onProgress((frame + 1) / totalFrames, frame + 1, totalFrames);
        if (frame % 5 === 0) {
          // Yield to the event loop periodically so the UI stays responsive
          // and cancel()/error callbacks can run.
          await new Promise((resolve) => setTimeout(resolve, 0));
        }
      }
      if (encoderError) {
        throw encoderError;
      }
      if (cancelled) {
        encoder.close();
        throw new Error("Rendering cancelled");
      }
      await encoder.flush();
      encoder.close();
      encoder = null;
      if (cancelled || !muxer) {
        throw new Error("Rendering cancelled");
      }
      muxer.finalize();
      const buffer = muxer.target.buffer;
      muxer = null;
      const blob = new Blob([buffer], { type: "video/mp4" });
      return {
        blob,
        duration,
        frames: totalFrames
      };
    },
    cancel() {
      cancelled = true;
      if (encoder && encoder.state !== "closed") {
        encoder.close();
      }
    }
  };
}
2377
/**
 * Triggers a browser download of `blob` under `filename` by clicking a
 * temporary anchor element, then cleans up the element and its object URL.
 */
function downloadBlob(blob, filename) {
  const objectUrl = URL.createObjectURL(blob);
  const anchor = document.createElement("a");
  anchor.href = objectUrl;
  anchor.download = filename;
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  URL.revokeObjectURL(objectUrl);
}
2387
/**
 * Builds an animation: creates a fresh scene, lets `fn` populate it and
 * return a timeline description, and compiles that description.
 * @returns {{ scene, timeline }} the scene plus its compiled timeline
 */
function defineAnimation(fn) {
  const scene = makeScene();
  const compiled = timeline(fn(scene));
  return { scene, timeline: compiled };
}
2393
// Alias: `createAnimation` is the same function object as `defineAnimation`.
const createAnimation = defineAnimation;
// Public API: single-letter names (S, V, b, ...) are re-exports from the
// bundled player chunk, renamed back to their public identifiers.
export {
  S as Screen,
  V as Vec,
  b as angle,
  createAnimation,
  createVideoRenderer,
  defineAnimation,
  downloadBlob,
  easeBack,
  easeBounce,
  easeCirc,
  easeElastic,
  easeIn,
  easeInOut,
  easeOut,
  a as isExpr,
  i as isGroup,
  isWebCodecsSupported,
  l as lerp,
  linear,
  m as makePlayer,
  makeScene,
  overShoot,
  timeline,
  v as vec
};
//# sourceMappingURL=index.js.map