@editframe/elements 0.5.0-beta.8 → 0.6.0-beta.1
This diff shows the contents of publicly released versions of the package as they appear in their public registries, and is provided for informational purposes only.
- package/dist/lib/av/EncodedAsset.cjs +561 -0
- package/dist/{editor/util/EncodedAsset/EncodedAsset.mjs → lib/av/EncodedAsset.js} +40 -33
- package/dist/lib/av/MP4File.cjs +182 -0
- package/dist/{editor/util/MP4File.mjs → lib/av/MP4File.js} +55 -51
- package/dist/lib/av/msToTimeCode.cjs +15 -0
- package/dist/lib/util/awaitMicrotask.cjs +8 -0
- package/dist/lib/util/memoize.cjs +14 -0
- package/dist/{util/memoize.mjs → lib/util/memoize.js} +1 -2
- package/dist/packages/elements/src/EF_FRAMEGEN.cjs +197 -0
- package/dist/packages/elements/src/EF_FRAMEGEN.d.ts +45 -0
- package/dist/packages/elements/src/EF_FRAMEGEN.js +197 -0
- package/dist/packages/elements/src/EF_INTERACTIVE.cjs +4 -0
- package/dist/packages/elements/src/EF_INTERACTIVE.d.ts +1 -0
- package/dist/packages/elements/src/elements/CrossUpdateController.cjs +16 -0
- package/dist/packages/elements/src/elements/CrossUpdateController.d.ts +9 -0
- package/dist/packages/elements/src/elements/EFAudio.cjs +53 -0
- package/dist/packages/elements/src/elements/EFAudio.d.ts +10 -0
- package/dist/{elements/src/elements/EFAudio.mjs → packages/elements/src/elements/EFAudio.js} +2 -5
- package/dist/packages/elements/src/elements/EFCaptions.cjs +171 -0
- package/dist/packages/elements/src/elements/EFCaptions.d.ts +39 -0
- package/dist/{elements/src/elements/EFCaptions.mjs → packages/elements/src/elements/EFCaptions.js} +18 -20
- package/dist/packages/elements/src/elements/EFImage.cjs +79 -0
- package/dist/packages/elements/src/elements/EFImage.d.ts +14 -0
- package/dist/{elements/src/elements/EFImage.mjs → packages/elements/src/elements/EFImage.js} +8 -7
- package/dist/packages/elements/src/elements/EFMedia.cjs +334 -0
- package/dist/packages/elements/src/elements/EFMedia.d.ts +61 -0
- package/dist/{elements/src/elements/EFMedia.mjs → packages/elements/src/elements/EFMedia.js} +40 -38
- package/dist/packages/elements/src/elements/EFSourceMixin.cjs +55 -0
- package/dist/packages/elements/src/elements/EFSourceMixin.d.ts +12 -0
- package/dist/{elements/src/elements/EFSourceMixin.mjs → packages/elements/src/elements/EFSourceMixin.js} +6 -8
- package/dist/packages/elements/src/elements/EFTemporal.cjs +198 -0
- package/dist/packages/elements/src/elements/EFTemporal.d.ts +36 -0
- package/dist/{elements/src/elements/EFTemporal.mjs → packages/elements/src/elements/EFTemporal.js} +6 -22
- package/dist/packages/elements/src/elements/EFTimegroup.browsertest.d.ts +12 -0
- package/{src/elements/EFTimegroup.ts → dist/packages/elements/src/elements/EFTimegroup.cjs} +162 -213
- package/dist/packages/elements/src/elements/EFTimegroup.d.ts +39 -0
- package/dist/{elements/src/elements/EFTimegroup.mjs → packages/elements/src/elements/EFTimegroup.js} +55 -65
- package/{src/elements/EFTimeline.ts → dist/packages/elements/src/elements/EFTimeline.cjs} +5 -3
- package/dist/packages/elements/src/elements/EFTimeline.d.ts +3 -0
- package/dist/{elements/src/elements/EFTimeline.mjs → packages/elements/src/elements/EFTimeline.js} +5 -2
- package/dist/packages/elements/src/elements/EFVideo.cjs +110 -0
- package/dist/packages/elements/src/elements/EFVideo.d.ts +14 -0
- package/dist/{elements/src/elements/EFVideo.mjs → packages/elements/src/elements/EFVideo.js} +10 -32
- package/dist/packages/elements/src/elements/EFWaveform.cjs +235 -0
- package/dist/packages/elements/src/elements/EFWaveform.d.ts +28 -0
- package/dist/{elements/src/elements/EFWaveform.mjs → packages/elements/src/elements/EFWaveform.js} +15 -16
- package/dist/packages/elements/src/elements/FetchMixin.cjs +28 -0
- package/dist/packages/elements/src/elements/FetchMixin.d.ts +8 -0
- package/dist/{elements/src/elements/FetchMixin.mjs → packages/elements/src/elements/FetchMixin.js} +5 -7
- package/dist/packages/elements/src/elements/TimegroupController.cjs +20 -0
- package/dist/packages/elements/src/elements/TimegroupController.d.ts +14 -0
- package/dist/packages/elements/src/elements/durationConverter.cjs +8 -0
- package/dist/packages/elements/src/elements/durationConverter.d.ts +4 -0
- package/dist/{elements/src/elements/durationConverter.mjs → packages/elements/src/elements/durationConverter.js} +1 -1
- package/dist/packages/elements/src/elements/parseTimeToMs.cjs +12 -0
- package/dist/packages/elements/src/elements/parseTimeToMs.d.ts +1 -0
- package/dist/packages/elements/src/elements/parseTimeToMs.js +12 -0
- package/dist/packages/elements/src/elements/util.cjs +11 -0
- package/dist/packages/elements/src/elements/util.d.ts +4 -0
- package/dist/{elements/src/elements/util.mjs → packages/elements/src/elements/util.js} +1 -1
- package/dist/packages/elements/src/gui/EFFilmstrip.cjs +675 -0
- package/dist/packages/elements/src/gui/EFFilmstrip.d.ts +138 -0
- package/dist/{elements/src/gui/EFFilmstrip.mjs → packages/elements/src/gui/EFFilmstrip.js} +57 -55
- package/dist/packages/elements/src/gui/EFWorkbench.cjs +199 -0
- package/dist/packages/elements/src/gui/EFWorkbench.d.ts +44 -0
- package/dist/{elements/src/gui/EFWorkbench.mjs → packages/elements/src/gui/EFWorkbench.js} +27 -49
- package/{src/gui/TWMixin.ts → dist/packages/elements/src/gui/TWMixin.cjs} +11 -10
- package/dist/packages/elements/src/gui/TWMixin.css.cjs +3 -0
- package/dist/packages/elements/src/gui/TWMixin.css.js +4 -0
- package/dist/packages/elements/src/gui/TWMixin.d.ts +3 -0
- package/dist/{elements/src/gui/TWMixin.mjs → packages/elements/src/gui/TWMixin.js} +4 -3
- package/dist/packages/elements/src/index.cjs +47 -0
- package/dist/packages/elements/src/index.d.ts +10 -0
- package/dist/packages/elements/src/index.js +23 -0
- package/dist/style.css +13 -4
- package/package.json +23 -4
- package/dist/elements/src/EF_FRAMEGEN.mjs +0 -130
- package/dist/elements/src/elements/parseTimeToMs.mjs +0 -13
- package/dist/elements/src/elements.mjs +0 -12
- package/dist/elements/src/gui/TWMixin.css.mjs +0 -4
- package/dist/util/awaitAnimationFrame.mjs +0 -11
- package/docker-compose.yaml +0 -17
- package/src/EF_FRAMEGEN.ts +0 -208
- package/src/EF_INTERACTIVE.ts +0 -2
- package/src/elements/CrossUpdateController.ts +0 -18
- package/src/elements/EFAudio.ts +0 -42
- package/src/elements/EFCaptions.ts +0 -202
- package/src/elements/EFImage.ts +0 -70
- package/src/elements/EFMedia.ts +0 -395
- package/src/elements/EFSourceMixin.ts +0 -57
- package/src/elements/EFTemporal.ts +0 -246
- package/src/elements/EFTimegroup.browsertest.ts +0 -360
- package/src/elements/EFVideo.ts +0 -114
- package/src/elements/EFWaveform.ts +0 -407
- package/src/elements/FetchMixin.ts +0 -18
- package/src/elements/TimegroupController.ts +0 -25
- package/src/elements/buildLitFixture.ts +0 -13
- package/src/elements/durationConverter.ts +0 -6
- package/src/elements/parseTimeToMs.ts +0 -10
- package/src/elements/util.ts +0 -24
- package/src/gui/EFFilmstrip.ts +0 -702
- package/src/gui/EFWorkbench.ts +0 -242
- package/src/gui/TWMixin.css +0 -3
- package/src/util.d.ts +0 -1
- /package/dist/{editor/msToTimeCode.mjs → lib/av/msToTimeCode.js} +0 -0
- /package/dist/{util/awaitMicrotask.mjs → lib/util/awaitMicrotask.js} +0 -0
- /package/dist/{elements/src/EF_INTERACTIVE.mjs → packages/elements/src/EF_INTERACTIVE.js} +0 -0
- /package/dist/{elements/src/elements/CrossUpdateController.mjs → packages/elements/src/elements/CrossUpdateController.js} +0 -0
- /package/dist/{elements/src/elements/TimegroupController.mjs → packages/elements/src/elements/TimegroupController.js} +0 -0

package/dist/{editor/util/EncodedAsset/EncodedAsset.mjs → lib/av/EncodedAsset.js}

```diff
@@ -1,15 +1,14 @@
-import { memoize } from "../../../util/memoize.mjs";
 import * as MP4Box from "mp4box";
-import {
+import { memoize } from "../util/memoize.js";
+import { MP4File } from "./MP4File.js";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
-  var result =
+  var result = __getOwnPropDesc(target, key);
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
-      result =
-  if (
-  __defProp(target, key, result);
+      result = decorator(target, key, result) || result;
+  if (result) __defProp(target, key, result);
   return result;
 };
 const BUFFER_SIZE = 10;
@@ -49,7 +48,7 @@ class ISOFileAsset extends FileAsset {
 }
 __decorateClass([
   memoize
-], ISOFileAsset.prototype, "fileInfo"
+], ISOFileAsset.prototype, "fileInfo");
 const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
   constructor(localName, mp4boxFile, file) {
     super(localName, file, mp4boxFile);
@@ -112,11 +111,12 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     this.requestedSampleNumber = 0;
     this.outCursor = 0;
     this.sampleCursor = 0;
-    this.decodedFrames
+    for (const frame of this.decodedFrames) {
       frame.close();
-    }
+    }
     this.decodedFrames = [];
     this.lastDecodedSample = void 0;
+    this.lastSoughtFrame?.close();
     this.lastSoughtFrame = void 0;
   }
   addEventListener(type, callback) {
@@ -127,9 +127,9 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     this.eventListeners[type]?.delete(callback);
   }
   emit(type, ...args) {
-    this.eventListeners[type]
+    for (const listener of this.eventListeners[type] ?? []) {
       listener(...args);
-    }
+    }
   }
   get videoCodec() {
     if (!this.defaultVideoTrack) {
@@ -220,10 +220,15 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
   }
   async decodeSlice(start, end) {
     const samples = this.samples.slice(start, end + 1);
-    const
-    const
+    const firstSample = samples[0];
+    const lastSample = samples[samples.length - 1];
+    if (!firstSample || !lastSample) {
+      throw new Error("Samples not found");
+    }
+    const sliceStart = firstSample.offset;
+    const sliceEnd = lastSample.offset + lastSample.size;
     const buffer = await this.file.slice(sliceStart, sliceEnd).arrayBuffer();
-    const firstSampleOffset =
+    const firstSampleOffset = firstSample.offset;
     for (let i = start; i <= end; i++) {
       await this.waitUntilVideoQueueDrained();
       const sample = this.getSample(i);
@@ -246,7 +251,7 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     if (!this.defaultVideoTrack) {
       throw new Error("No default video track found");
     }
-    let description;
+    let description = new Uint8Array();
     const trak = this.mp4boxFile.getTrackById(this.defaultVideoTrack.id);
     for (const entry of trak.mdia.minf.stbl.stsd.entries) {
       if (entry.avcC ?? entry.hvcC) {
@@ -268,7 +273,6 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
       codec: this.defaultVideoTrack.codec,
       codedWidth: this.defaultVideoTrack.track_width,
       codedHeight: this.defaultVideoTrack.track_height,
-      // hardwareAcceleration: "prefer-hardware",
       optimizeForLatency: true,
       description
     };
@@ -287,7 +291,7 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
   getSample(index = -1) {
     const sample = this.samples?.[index];
     if (!sample) {
-      throw new Error(
+      throw new Error(`Sample not found at index ${index}`);
     }
     return sample;
   }
@@ -380,9 +384,9 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     if (this.seekingWillGoBackwards(seconds)) {
      console.log("BACKWARDS FLUSH");
       await this.videoDecoder.flush();
-      this.decodedFrames
+      for (const frame2 of this.decodedFrames) {
         frame2.close();
-      }
+      }
       this.decodedFrames = [];
       let syncSampleNumber = sample.number;
       while (!this.getSample(syncSampleNumber).is_sync) {
@@ -406,6 +410,9 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     }
     this.removeEventListener("frame", maybeFrame);
     if (frame) {
+      if (this.lastSoughtFrame && !this.decodedFrames.includes(this.lastSoughtFrame)) {
+        this.lastSoughtFrame.close();
+      }
       this.lastSoughtFrame = frame;
     }
     return frame;
@@ -422,25 +429,25 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
 };
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "editsOffset"
+], _VideoAsset.prototype, "editsOffset");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "timescale"
+], _VideoAsset.prototype, "timescale");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "samples"
+], _VideoAsset.prototype, "samples");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "displayOrderedSamples"
+], _VideoAsset.prototype, "displayOrderedSamples");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "defaultVideoTrack"
+], _VideoAsset.prototype, "defaultVideoTrack");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "defaultVideoTrak"
+], _VideoAsset.prototype, "defaultVideoTrak");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "duration"
+], _VideoAsset.prototype, "duration");
 let VideoAsset = _VideoAsset;
 const _AudioAsset = class _AudioAsset2 extends ISOFileAsset {
   static async createFromReadableStream(id, stream, file) {
@@ -493,19 +500,19 @@ const _AudioAsset = class _AudioAsset2 extends ISOFileAsset {
 };
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "defaultAudioTrack"
+], _AudioAsset.prototype, "defaultAudioTrack");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "defaultAudioTrak"
+], _AudioAsset.prototype, "defaultAudioTrak");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "audioCodec"
+], _AudioAsset.prototype, "audioCodec");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "samplerate"
+], _AudioAsset.prototype, "samplerate");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "channelCount"
+], _AudioAsset.prototype, "channelCount");
 const _ImageAsset = class _ImageAsset2 extends FileAsset {
   static async createFromReadableStream(id, file) {
     if (file.size === 0) {
@@ -525,10 +532,10 @@ const _ImageAsset = class _ImageAsset2 extends FileAsset {
 };
 __decorateClass([
   memoize
-], _ImageAsset.prototype, "objectUrl"
+], _ImageAsset.prototype, "objectUrl");
 __decorateClass([
   memoize
-], _ImageAsset.prototype, "format"
+], _ImageAsset.prototype, "format");
 export {
   AssetNotAvailableLocally,
   FileAsset,
```
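Several of the EncodedAsset changes above restore loops that close() decoded frames: on reset, on a backwards-seek flush, and for a superseded lastSoughtFrame. That is the general WebCodecs contract rather than anything package-specific: a VideoFrame holds GPU or pool-backed memory that is only released by an explicit close(). A minimal generic sketch of that contract (illustrative only, not package code):

```ts
// Generic WebCodecs illustration, not @editframe/elements code: every VideoFrame
// handed to the output callback must eventually be closed, otherwise memory leaks
// and the decoder can stall once its internal frame pool is exhausted.
const decoder = new VideoDecoder({
  output: (frame: VideoFrame) => {
    // ...draw or copy the frame here...
    frame.close(); // release the underlying frame resources
  },
  error: (e: DOMException) => console.error("decode error", e),
});
```

Skipping close() is a common cause of decoders silently stalling after a few dozen frames, which is why the reset paths above close everything before clearing the arrays.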
package/dist/lib/av/MP4File.cjs (new file)

```diff
@@ -0,0 +1,182 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const MP4Box = require("mp4box");
+function _interopNamespaceDefault(e) {
+  const n = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
+  if (e) {
+    for (const k in e) {
+      if (k !== "default") {
+        const d = Object.getOwnPropertyDescriptor(e, k);
+        Object.defineProperty(n, k, d.get ? d : {
+          enumerable: true,
+          get: () => e[k]
+        });
+      }
+    }
+  }
+  n.default = e;
+  return Object.freeze(n);
+}
+const MP4Box__namespace = /* @__PURE__ */ _interopNamespaceDefault(MP4Box);
+class MP4File extends MP4Box__namespace.ISOFile {
+  constructor() {
+    super(...arguments);
+    this.readyPromise = new Promise((resolve, reject) => {
+      this.onReady = () => resolve();
+      this.onError = reject;
+    });
+    this.waitingForSamples = [];
+    this._hasSeenLastSamples = false;
+    this._arrayBufferFileStart = 0;
+  }
+  setSegmentOptions(id, user, options) {
+    const trak = this.getTrackById(id);
+    if (trak) {
+      trak.nextSample = 0;
+      this.fragmentedTracks.push({
+        id,
+        user,
+        trak,
+        segmentStream: null,
+        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
+        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
+      });
+    }
+  }
+  /**
+   * Fragments all tracks in a file into separate array buffers.
+   */
+  async fragmentAllTracks() {
+    const trackBuffers = {};
+    for await (const segment of this.fragmentIterator()) {
+      (trackBuffers[segment.track] ??= []).push(segment.data);
+    }
+    return trackBuffers;
+  }
+  async *fragmentIterator() {
+    await this.readyPromise;
+    const trackInfo = {};
+    for (const videoTrack of this.getInfo().videoTracks) {
+      trackInfo[videoTrack.id] = { index: 0, complete: false };
+      this.setSegmentOptions(videoTrack.id, null, {
+        rapAlignement: true
+      });
+    }
+    for (const audioTrack of this.getInfo().audioTracks) {
+      trackInfo[audioTrack.id] = { index: 0, complete: false };
+      const sampleRate = audioTrack.audio.sample_rate;
+      const probablePacketSize = 1024;
+      const probableFourSecondsOfSamples = Math.ceil(
+        sampleRate / probablePacketSize * 4
+      );
+      this.setSegmentOptions(audioTrack.id, null, {
+        nbSamples: probableFourSecondsOfSamples
+      });
+    }
+    const initSegments = this.initializeSegmentation();
+    for (const initSegment of initSegments) {
+      yield {
+        track: initSegment.id,
+        segment: "init",
+        data: initSegment.buffer,
+        complete: false
+      };
+    }
+    const fragmentStartSamples = {};
+    let finishedReading = false;
+    const allTracksFinished = () => {
+      for (const fragmentedTrack of this.fragmentedTracks) {
+        if (!trackInfo[fragmentedTrack.id]?.complete) {
+          return false;
+        }
+      }
+      return true;
+    };
+    while (!(finishedReading && allTracksFinished())) {
+      for (const fragTrak of this.fragmentedTracks) {
+        const trak = fragTrak.trak;
+        if (trak.nextSample === void 0) {
+          throw new Error("trak.nextSample is undefined");
+        }
+        if (trak.samples === void 0) {
+          throw new Error("trak.samples is undefined");
+        }
+        while (trak.nextSample < trak.samples.length) {
+          let result = void 0;
+          const fragTrakNextSample = trak.samples[trak.nextSample];
+          if (fragTrakNextSample) {
+            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
+          }
+          try {
+            result = this.createFragment(
+              fragTrak.id,
+              trak.nextSample,
+              fragTrak.segmentStream
+            );
+          } catch (error) {
+            console.log("Failed to createFragment", error);
+          }
+          if (result) {
+            fragTrak.segmentStream = result;
+            trak.nextSample++;
+          } else {
+            finishedReading = await this.waitForMoreSamples();
+            break;
+          }
+          const nextSample = trak.samples[trak.nextSample];
+          const emitSegment = (
+            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
+            // finished ||
+            // if we have more samples than the number of samples requested, we emit the fragment
+            trak.nextSample >= trak.samples.length
+          );
+          if (emitSegment) {
+            const trackInfoForFrag = trackInfo[fragTrak.id];
+            if (!trackInfoForFrag) {
+              throw new Error("trackInfoForFrag is undefined");
+            }
+            if (trak.nextSample >= trak.samples.length) {
+              trackInfoForFrag.complete = true;
+            }
+            const startSample = fragmentStartSamples[fragTrak.id];
+            const endSample = trak.samples[trak.nextSample - 1];
+            if (!startSample || !endSample) {
+              throw new Error("startSample or endSample is undefined");
+            }
+            yield {
+              track: fragTrak.id,
+              segment: trackInfoForFrag.index,
+              data: fragTrak.segmentStream.buffer,
+              complete: trackInfoForFrag.complete,
+              cts: startSample.cts,
+              dts: startSample.dts,
+              duration: endSample.dts - startSample.dts + endSample.duration
+            };
+            trackInfoForFrag.index += 1;
+            fragTrak.segmentStream = null;
+            delete fragmentStartSamples[fragTrak.id];
+          }
+        }
+      }
+      finishedReading = await this.waitForMoreSamples();
+    }
+  }
+  waitForMoreSamples() {
+    if (this._hasSeenLastSamples) {
+      return Promise.resolve(true);
+    }
+    return new Promise((resolve) => {
+      this.waitingForSamples.push(resolve);
+    });
+  }
+  processSamples(last) {
+    this._hasSeenLastSamples = last;
+    for (const observer of this.waitingForSamples) {
+      observer(last);
+    }
+    this.waitingForSamples = [];
+  }
+}
+exports.MP4File = MP4File;
```
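For orientation, here is one way the new MP4File.fragmentIterator() might be driven. appendBuffer(), flush(), and the fileStart tag on the buffer are the standard mp4box.js feeding API; the import path and the overall wiring are assumptions for illustration, since this diff does not show how the package feeds bytes into MP4File:

```ts
import { MP4File } from "./MP4File.js"; // ESM counterpart listed in the file table above

async function fragmentFromUrl(url: string) {
  const mp4 = new MP4File();
  const bytes = await (await fetch(url)).arrayBuffer();
  // mp4box.js expects each ArrayBuffer to carry its byte offset within the file
  const tagged = bytes as ArrayBuffer & { fileStart: number };
  tagged.fileStart = 0;
  mp4.appendBuffer(tagged);
  mp4.flush(); // tells mp4box no more data is coming, so remaining samples get processed

  // Yields one "init" segment per track, then numbered media segments with timing info
  for await (const segment of mp4.fragmentIterator()) {
    console.log(segment.track, segment.segment, segment.data.byteLength, segment.complete);
  }
  return mp4;
}
```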
package/dist/{editor/util/MP4File.mjs → lib/av/MP4File.js}

```diff
@@ -28,7 +28,7 @@ class MP4File extends MP4Box.ISOFile {
    * Fragments all tracks in a file into separate array buffers.
    */
   async fragmentAllTracks() {
-
+    const trackBuffers = {};
     for await (const segment of this.fragmentIterator()) {
       (trackBuffers[segment.track] ??= []).push(segment.data);
     }
@@ -82,60 +82,64 @@ class MP4File extends MP4Box.ISOFile {
         if (trak.samples === void 0) {
           throw new Error("trak.samples is undefined");
         }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        while (trak.nextSample < trak.samples.length) {
+          let result = void 0;
+          const fragTrakNextSample = trak.samples[trak.nextSample];
+          if (fragTrakNextSample) {
+            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
+          }
+          try {
+            result = this.createFragment(
+              fragTrak.id,
+              trak.nextSample,
+              fragTrak.segmentStream
+            );
+          } catch (error) {
+            console.log("Failed to createFragment", error);
+          }
+          if (result) {
+            fragTrak.segmentStream = result;
+            trak.nextSample++;
+          } else {
+            finishedReading = await this.waitForMoreSamples();
+            break;
+          }
+          const nextSample = trak.samples[trak.nextSample];
+          const emitSegment = (
+            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
+            // finished ||
+            // if we have more samples than the number of samples requested, we emit the fragment
+            trak.nextSample >= trak.samples.length
+          );
+          if (emitSegment) {
+            const trackInfoForFrag = trackInfo[fragTrak.id];
+            if (!trackInfoForFrag) {
+              throw new Error("trackInfoForFrag is undefined");
             }
-        if (
-
-          trak.nextSample++;
-        } else {
-          finishedReading = await this.waitForMoreSamples();
-          break eachSample;
+            if (trak.nextSample >= trak.samples.length) {
+              trackInfoForFrag.complete = true;
           }
-        const
-        const
-
-
-          !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
-          // finished ||
-          // if we have more samples than the number of samples requested, we emit the fragment
-          trak.nextSample >= trak.samples.length
-        );
-        if (emitSegment) {
-          if (trak.nextSample >= trak.samples.length) {
-            trackInfo[fragTrak.id].complete = true;
-          }
-          const startSample = fragmentStartSamples[fragTrak.id];
-          const endSample = trak.samples[trak.nextSample - 1];
-          if (!startSample || !endSample) {
-            throw new Error("startSample or endSample is undefined");
-          }
-          yield {
-            track: fragTrak.id,
-            segment: trackInfo[fragTrak.id].index,
-            data: fragTrak.segmentStream.buffer,
-            complete: trackInfo[fragTrak.id].complete,
-            cts: startSample.cts,
-            dts: startSample.dts,
-            duration: endSample.dts - startSample.dts + endSample.duration
-          };
-          trackInfo[fragTrak.id].index += 1;
-          fragTrak.segmentStream = null;
-          delete fragmentStartSamples[fragTrak.id];
+            const startSample = fragmentStartSamples[fragTrak.id];
+            const endSample = trak.samples[trak.nextSample - 1];
+            if (!startSample || !endSample) {
+              throw new Error("startSample or endSample is undefined");
             }
+            yield {
+              track: fragTrak.id,
+              segment: trackInfoForFrag.index,
+              data: fragTrak.segmentStream.buffer,
+              complete: trackInfoForFrag.complete,
+              cts: startSample.cts,
+              dts: startSample.dts,
+              duration: endSample.dts - startSample.dts + endSample.duration
+            };
+            trackInfoForFrag.index += 1;
+            fragTrak.segmentStream = null;
+            delete fragmentStartSamples[fragTrak.id];
           }
+        }
       }
       finishedReading = await this.waitForMoreSamples();
     }
```
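fragmentAllTracks(), shown in the hunk above and in the new CJS build, is the convenience wrapper over the iterator: it groups every yielded segment buffer by track id. A small usage sketch, assuming an mp4 instance prepared as in the fragmentFromUrl example earlier:

```ts
// Assumes `mp4` was created and fed bytes as in the fragmentFromUrl sketch above.
const buffersByTrack = await mp4.fragmentAllTracks();
for (const [trackId, segments] of Object.entries(buffersByTrack)) {
  const totalBytes = segments.reduce((sum, buf) => sum + buf.byteLength, 0);
  console.log(`track ${trackId}: ${segments.length} segments, ${totalBytes} bytes`);
}
```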
package/dist/lib/av/msToTimeCode.cjs (new file)

```diff
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const msToTimeCode = (ms, subSecond = false) => {
+  const seconds = Math.floor(ms / 1e3);
+  const minutes = Math.floor(seconds / 60);
+  const hours = Math.floor(minutes / 60);
+  const pad = (num) => num.toString().padStart(2, "0");
+  let timecode = `${pad(hours)}:${pad(minutes % 60)}:${pad(seconds % 60)}`;
+  if (subSecond) {
+    const subSeconds = Math.floor(ms % 1e3 / 10);
+    timecode += `.${subSeconds.toString().padStart(2, "0")}`;
+  }
+  return timecode;
+};
+exports.msToTimeCode = msToTimeCode;
```
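As a quick sanity check on the new helper, these are the values the code above produces; the ESM import path comes from the rename table at the top (adjust it for your setup):

```ts
import { msToTimeCode } from "./msToTimeCode.js"; // dist/lib/av/msToTimeCode.js in the package

console.log(msToTimeCode(3_723_450));       // "01:02:03"
console.log(msToTimeCode(3_723_450, true)); // "01:02:03.45"  (sub-seconds are hundredths, floored)
console.log(msToTimeCode(500, true));       // "00:00:00.50"
```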
package/dist/lib/util/memoize.cjs (new file)

```diff
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const memoize = (_target, _propertyKey, descriptor) => {
+  const get = descriptor.get;
+  if (!get) return;
+  const memoized = /* @__PURE__ */ new WeakMap();
+  descriptor.get = function() {
+    if (!memoized.has(this)) {
+      memoized.set(this, get.call(this));
+    }
+    return memoized.get(this);
+  };
+};
+exports.memoize = memoize;
```