@editframe/elements 0.5.0-beta.4 → 0.5.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/editor/msToTimeCode.mjs +15 -0
- package/dist/editor/util/EncodedAsset/EncodedAsset.mjs +537 -0
- package/dist/editor/util/MP4File.mjs +161 -0
- package/dist/elements/elements/CrossUpdateController.mjs +16 -0
- package/dist/elements/elements/EFAudio.mjs +37 -0
- package/dist/elements/elements/EFCaptions.mjs +172 -0
- package/dist/elements/elements/EFImage.mjs +67 -0
- package/dist/elements/elements/EFMedia.mjs +255 -0
- package/dist/elements/elements/EFSourceMixin.mjs +57 -0
- package/dist/elements/elements/EFTemporal.mjs +186 -0
- package/dist/elements/elements/EFTimegroup.mjs +230 -0
- package/dist/elements/elements/EFTimeline.mjs +12 -0
- package/dist/elements/elements/EFVideo.mjs +123 -0
- package/dist/elements/elements/EFWaveform.mjs +203 -0
- package/dist/elements/elements/FetchMixin.mjs +30 -0
- package/dist/elements/elements/TimegroupController.mjs +20 -0
- package/dist/elements/elements/durationConverter.mjs +8 -0
- package/dist/elements/elements/parseTimeToMs.mjs +13 -0
- package/dist/elements/elements/util.mjs +11 -0
- package/dist/elements/elements.css.mjs +1 -0
- package/dist/elements/elements.mjs +11 -0
- package/dist/elements/gui/EFFilmstrip.mjs +680 -0
- package/dist/elements/gui/EFWorkbench.mjs +234 -0
- package/dist/elements/gui/TWMixin.css.mjs +4 -0
- package/dist/elements/gui/TWMixin.mjs +27 -0
- package/dist/style.css +754 -0
- package/dist/util/awaitMicrotask.mjs +8 -0
- package/dist/util/memoize.mjs +15 -0
- package/package.json +9 -2
- package/dist/editframe-elements.css +0 -1
- package/dist/editframe-elements.mjs +0 -9089
- package/dist/editframe-elements.umd.js +0 -288
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * Formats a millisecond duration as a zero-padded `HH:MM:SS` timecode,
 * optionally appending hundredths of a second (`HH:MM:SS.CC`).
 *
 * @param {number} ms - Duration in milliseconds.
 * @param {boolean} [subSecond=false] - When true, append a two-digit
 *   hundredths-of-a-second suffix.
 * @returns {string} The formatted timecode.
 */
const msToTimeCode = (ms, subSecond = false) => {
  const two = (value) => String(value).padStart(2, "0");
  const totalSeconds = Math.floor(ms / 1e3);
  const totalMinutes = Math.floor(totalSeconds / 60);
  const totalHours = Math.floor(totalMinutes / 60);
  let timecode = [totalHours, totalMinutes % 60, totalSeconds % 60]
    .map(two)
    .join(":");
  if (subSecond) {
    // Hundredths of a second: the leftover milliseconds divided by 10.
    const hundredths = Math.floor(ms % 1e3 / 10);
    timecode += `.${two(hundredths)}`;
  }
  return timecode;
};
export {
  msToTimeCode
};
|
|
@@ -0,0 +1,537 @@
|
|
|
1
|
+
import { memoize } from "../../../util/memoize.mjs";
|
|
2
|
+
import * as MP4Box from "mp4box";
|
|
3
|
+
import { MP4File } from "../MP4File.mjs";
|
|
4
|
+
// esbuild-generated runtime helper for applying legacy (experimental)
// TypeScript decorators. Do not hand-edit: downstream decorator semantics
// depend on this exact behavior.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
// Applies `decorators` to `target[key]`.
// `kind`: 0 = class decorator, 1 = method/accessor (descriptor-based),
// > 1 = field (seed value is undefined).
var __decorateClass = (decorators, target, key, kind) => {
  // Seed `result` with the property descriptor (kind 1), the class itself
  // (kind 0), or undefined (kind > 1).
  var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
  // Decorators compose right-to-left, matching source stacking order; a
  // decorator may return a replacement or fall through to the prior result.
  for (var i = decorators.length - 1, decorator; i >= 0; i--)
    if (decorator = decorators[i])
      result = (kind ? decorator(target, key, result) : decorator(result)) || result;
  // For descriptor-based decorations, write the (possibly replaced)
  // descriptor back onto the target.
  if (kind && result)
    __defProp(target, key, result);
  return result;
};
|
|
15
|
+
// Maximum number of decoded video frames kept in the in-memory cache before
// the oldest frame is closed and evicted (see VideoAsset.pruneBuffer).
const BUFFER_SIZE = 10;
/**
 * Thrown when an asset's backing file has no local bytes available
 * (e.g. a zero-byte placeholder), so it cannot be read or decoded.
 */
class AssetNotAvailableLocally extends Error {
  constructor(...args) {
    super(...args);
    // Fix: without this, instances report the generic "Error" name in logs
    // and stack traces, hiding which domain error actually fired.
    this.name = "AssetNotAvailableLocally";
  }
}
|
|
18
|
+
/**
 * Thin wrapper around a `File`/`Blob`-like object that exposes the subset of
 * operations the asset pipeline needs: size, extension, and byte access.
 */
class FileAsset {
  /**
   * @param {string} localName - Identifier for this asset within the editor.
   * @param {File} file - Backing file providing name, size, and byte access.
   */
  constructor(localName, file) {
    this.localName = localName;
    this.file = file;
  }
  /**
   * Reads the entire backing file into memory.
   * @returns {Promise<ArrayBuffer>} The file's full contents.
   */
  async arrayBuffer() {
    const contents = await this.file.arrayBuffer();
    return contents;
  }
  // Total size of the backing file, in bytes.
  get byteSize() {
    const { size } = this.file;
    return size;
  }
  // Everything after the final "." in the file name (e.g. "mp4").
  get fileExtension() {
    const segments = this.file.name.split(".");
    return segments[segments.length - 1];
  }
  /**
   * Returns a byte-range view of the backing file without copying it.
   * @param {number} start - Inclusive start byte offset.
   * @param {number} end - Exclusive end byte offset.
   * @returns {Blob} The requested slice.
   */
  slice(start, end) {
    return this.file.slice(start, end);
  }
}
|
|
36
|
+
/**
 * A FileAsset whose bytes are an ISO-BMFF (MP4) container that has been
 * parsed by mp4box, giving access to track and box metadata.
 */
class ISOFileAsset extends FileAsset {
  /**
   * @param {string} localName - Identifier for this asset within the editor.
   * @param {File} file - Backing file with the raw container bytes.
   * @param {MP4File} mp4boxFile - Parsed mp4box representation of `file`.
   */
  constructor(localName, file, mp4boxFile) {
    super(localName, file);
    // Re-assign the base-class fields (this build output was generated from
    // TypeScript parameter properties, which emit these assignments).
    this.localName = localName;
    this.file = file;
    this.mp4boxFile = mp4boxFile;
  }
  // Parsed container metadata (tracks, duration, timescale, ...).
  get fileInfo() {
    return this.mp4boxFile.getInfo();
  }
  // Assets of this type always wrap an MP4 container.
  get containerFormat() {
    return "mp4";
  }
}
|
|
50
|
+
// Cache the parsed container info after the first access — getInfo() walks
// the mp4box box tree, so repeated reads would be wasteful.
__decorateClass([
  memoize
], ISOFileAsset.prototype, "fileInfo", 1);
|
|
53
|
+
/**
 * A video ISOFileAsset that decodes samples on demand through a WebCodecs
 * `VideoDecoder`, keeping a small rolling cache of decoded `VideoFrame`s
 * (capped at BUFFER_SIZE) and supporting time-based seeking with GOP-aware
 * skip/rewind logic.
 */
const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
  /**
   * @param {string} localName - Identifier for this asset within the editor.
   * @param {MP4File} mp4boxFile - Parsed mp4box representation of `file`.
   * @param {File} file - Backing file with the raw container bytes.
   */
  constructor(localName, mp4boxFile, file) {
    super(localName, file, mp4boxFile);
    // Rolling cache of decoded frames, oldest first (evicted by pruneBuffer).
    this.decodedFrames = [];
    this.requestedSampleNumber = 0;
    // Index into this.samples of the most recently emitted frame.
    this.outCursor = 0;
    // Index of the next sample to feed into the decoder.
    this.sampleCursor = 0;
    // type -> Set<callback>; "frame" is the only event emitted here.
    this.eventListeners = {};
    this.latestSeekCts = 0;
    this.videoDecoder = new VideoDecoder({
      error: (e) => {
        console.error(e);
      },
      // eslint-disable-next-line @typescript-eslint/no-misused-promises
      output: async (decodedFrame) => {
        // Cache a clone and close the original promptly to release the
        // decoder's backing resources.
        const clone = decodedFrame.clone();
        this.decodedFrames.push(clone);
        this.pruneBuffer();
        decodedFrame.close();
        // NOTE(review): decodedFrame.timestamp is read after close() —
        // consider reading it before closing (or using `clone.timestamp`).
        this.outCursor = this.samples.findIndex(
          (sample) => sample.cts === decodedFrame.timestamp
        );
        this.emit("frame", clone);
      }
    });
    this.configureDecoder();
  }
  /**
   * Streams `file`'s bytes into an mp4box parser and resolves to a
   * VideoAsset once the whole file has been appended.
   *
   * @param {string} id - Local name for the asset.
   * @param {ReadableStream<Uint8Array>} stream - Byte stream of `file`.
   * @param {File} file - Backing file (used for total size and byte access).
   */
  static async createFromReadableStream(id, stream, file) {
    let fileStart = 0;
    const inputFile = new MP4File();
    const reader = stream.getReader();
    const processChunk = ({
      done,
      value
    }) => {
      if (done) {
        return;
      }
      if (!value) {
        return;
      }
      // mp4box requires each ArrayBuffer to carry its absolute file offset.
      const mp4buffer = value.buffer;
      mp4buffer.fileStart = fileStart;
      // Flag the final append so mp4box flushes its remaining samples.
      const isLast = file.size === fileStart + value.byteLength;
      inputFile.appendBuffer(mp4buffer, isLast);
      fileStart += value.byteLength;
      return reader.read().then(processChunk);
    };
    await reader.read().then(processChunk);
    return new _VideoAsset2(id, inputFile, file);
  }
  /**
   * **Only use this function in tests to reset a VideoAsset to its initial state.**
   *
   * @deprecated
   */
  async TEST_ONLY_RESET() {
    await this.videoDecoder.flush();
    this.configureDecoder();
    this.requestedSampleNumber = 0;
    this.outCursor = 0;
    this.sampleCursor = 0;
    // Close cached frames before dropping them so GPU/decoder memory is freed.
    this.decodedFrames.forEach((frame) => {
      frame.close();
    });
    this.decodedFrames = [];
    this.lastDecodedSample = void 0;
    this.lastSoughtFrame = void 0;
  }
  // Registers `callback` for `type` events ("frame" is the only type emitted).
  addEventListener(type, callback) {
    this.eventListeners[type] ||= /* @__PURE__ */ new Set();
    this.eventListeners[type]?.add(callback);
  }
  // Unregisters a previously added listener (no-op if never registered).
  removeEventListener(type, callback) {
    this.eventListeners[type]?.delete(callback);
  }
  // Synchronously invokes every listener registered for `type`.
  emit(type, ...args) {
    this.eventListeners[type]?.forEach((listener) => {
      listener(...args);
    });
  }
  // Codec string of the default video track; throws when the file has none.
  get videoCodec() {
    if (!this.defaultVideoTrack) {
      throw new Error("No default video track found");
    }
    return this.defaultVideoTrack?.codec;
  }
  /**
   * Splits the sample table into sync-sample-aligned fragments: each
   * fragment starts at a sync (key) sample and accumulates the byte sizes of
   * the samples up to the next sync sample.
   * NOTE(review): the trailing fragment (after the last sync sample) is
   * never pushed, and start_ms/duration_ms hold cts values in track
   * timescale units despite the "_ms" names — confirm both are intentional.
   */
  get fragmentInfo() {
    const fragments = [];
    const [first, ...samples] = this.samples;
    if (!first) {
      return fragments;
    }
    let currentFragment = {
      offset: first.offset,
      size: first.size,
      start_ms: first.cts,
      duration_ms: 0
    };
    for (const sample of samples) {
      if (sample.is_sync) {
        // A new key frame closes out the previous fragment.
        if (currentFragment) {
          currentFragment.duration_ms = sample.cts - currentFragment.start_ms;
          fragments.push(currentFragment);
        }
        currentFragment = {
          offset: sample.offset,
          size: sample.size,
          start_ms: sample.cts,
          duration_ms: 0
        };
      } else {
        currentFragment.size += sample.size;
      }
    }
    return fragments;
  }
  // Evicts (and closes) the oldest cached frame once the cache exceeds
  // BUFFER_SIZE.
  pruneBuffer() {
    if (this.decodedFrames.length > BUFFER_SIZE) {
      this.decodedFrames.shift()?.close();
    }
  }
  // Debug helper: logs a one-line picture of decode progress —
  // ⬜️ already emitted, 🟩 cached, 🟨 still queued in the decoder.
  prettyPrint() {
    const samplesInEncoder = this.sampleCursor - this.outCursor;
    const cachedSamples = this.decodedFrames.length;
    console.log(
      Array.from({
        length: this.sampleCursor - samplesInEncoder - cachedSamples
      }).fill("⬜️").join(" "),
      Array.from({ length: cachedSamples }).fill("🟩").join(" "),
      Array.from({
        length: samplesInEncoder
      }).fill("🟨").join(" ")
    );
  }
  // Sum of the edit-list media_time offsets for the default video track
  // (0 when there is no edit list). Used to bias seek targets in
  // getSampleClosetToTime.
  get editsOffset() {
    if (!this.defaultVideoTrack?.edits) {
      return 0;
    }
    return this.defaultVideoTrack.edits.reduce((acc, edit) => {
      return acc + edit.media_time;
    }, 0);
  }
  // Resolves once the decoder's input queue is empty; re-checks after each
  // "dequeue" event by recursing.
  async waitUntilVideoQueueDrained() {
    if (this.videoDecoder.decodeQueueSize === 0) {
      return;
    }
    await new Promise((resolve) => {
      this.videoDecoder.addEventListener(
        "dequeue",
        () => {
          resolve();
        },
        { once: true }
      );
    });
    await this.waitUntilVideoQueueDrained();
  }
  // True while the sample cursor has not reached the end of the sample table.
  get canDecodeNextSample() {
    return this.sampleCursor < this.samples.length;
  }
  // Feeds exactly one sample (at the cursor) into the decoder, then
  // advances the cursor. Throws once all samples have been consumed.
  async decodeNextSample() {
    if (!this.canDecodeNextSample) {
      throw new Error("No more samples to decode");
    }
    await this.decodeSlice(this.sampleCursor, this.sampleCursor);
    this.sampleCursor++;
  }
  /**
   * Reads the byte range covering samples [start, end] (inclusive) in a
   * single file slice, then submits each sample to the decoder as an
   * EncodedVideoChunk. Flushes the decoder after the file's final sample so
   * trailing frames are emitted.
   */
  async decodeSlice(start, end) {
    const samples = this.samples.slice(start, end + 1);
    const sliceStart = samples[0].offset;
    const sliceEnd = samples[samples.length - 1].offset + samples[samples.length - 1].size;
    const buffer = await this.file.slice(sliceStart, sliceEnd).arrayBuffer();
    const firstSampleOffset = samples[0].offset;
    for (let i = start; i <= end; i++) {
      // Backpressure: don't stack chunks while the decoder is still busy.
      await this.waitUntilVideoQueueDrained();
      const sample = this.getSample(i);
      // Sample offsets are absolute; rebase them into the sliced buffer.
      const sampleStart = sample.offset - firstSampleOffset;
      const sampleEnd = sample.offset + sample.size - firstSampleOffset;
      const chunk = new EncodedVideoChunk({
        data: buffer.slice(sampleStart, sampleEnd),
        timestamp: sample.cts,
        duration: sample.duration,
        type: sample.is_sync ? "key" : "delta"
      });
      this.videoDecoder.decode(chunk);
      // When this was the last sample of the whole track, flush so the
      // decoder emits everything it is still holding.
      const nextSample = this.defaultVideoTrak?.samples?.[i + 1];
      if (nextSample === void 0) {
        await this.videoDecoder.flush();
      }
    }
  }
  /**
   * Builds the VideoDecoderConfig for the default video track, extracting
   * the codec-private data (avcC for H.264, hvcC for H.265) from the sample
   * description box. The leading 8 bytes (box header) are skipped when
   * building the `description` payload.
   */
  get decoderConfiguration() {
    if (!this.defaultVideoTrack) {
      throw new Error("No default video track found");
    }
    let description;
    const trak = this.mp4boxFile.getTrackById(this.defaultVideoTrack.id);
    for (const entry of trak.mdia.minf.stbl.stsd.entries) {
      if (entry.avcC ?? entry.hvcC) {
        const stream = new MP4Box.DataStream(
          void 0,
          0,
          MP4Box.DataStream.BIG_ENDIAN
        );
        if (entry.avcC) {
          entry.avcC.write(stream);
        } else {
          entry.hvcC.write(stream);
        }
        // Skip the 8-byte box header (size + type).
        description = new Uint8Array(stream.buffer, 8);
        break;
      }
    }
    return {
      codec: this.defaultVideoTrack.codec,
      codedWidth: this.defaultVideoTrack.track_width,
      codedHeight: this.defaultVideoTrack.track_height,
      // hardwareAcceleration: "prefer-hardware",
      optimizeForLatency: true,
      description
    };
  }
  /**
   * Configures the video decoder with the appropriate codec, dimensions, and hardware acceleration settings.
   * If the decoder is already configured, it will be reset before being reconfigured.
   */
  configureDecoder() {
    if (this.videoDecoder.state === "configured") {
      this.videoDecoder.reset();
    }
    this.videoDecoder.configure(this.decoderConfiguration);
  }
  // Default to -1 to throw error if called without an index
  getSample(index = -1) {
    const sample = this.samples?.[index];
    if (!sample) {
      throw new Error("Sample not found at index " + index);
    }
    return sample;
  }
  // Ticks-per-second of the default video track; throws when absent.
  get timescale() {
    if (!this.defaultVideoTrack) {
      throw new Error("No default video track found");
    }
    return this.defaultVideoTrack.timescale;
  }
  // Decode-ordered sample table of the default video track; throws when the
  // mp4box trak has no samples parsed yet.
  get samples() {
    if (!this.defaultVideoTrak.samples) {
      throw new Error("No video samples found");
    }
    return this.defaultVideoTrak.samples;
  }
  // Samples sorted by composition timestamp (presentation order), which can
  // differ from decode order when B-frames are present.
  get displayOrderedSamples() {
    return Array.from(this.samples).sort((a, b) => {
      return a.cts - b.cts;
    });
  }
  // Returns the first display-ordered sample at or after `seconds`, clamped
  // to the last sample. (Name keeps the original "Closet" spelling because
  // external callers may depend on it.)
  getSampleClosetToTime(seconds) {
    const targetTime = Math.round(seconds * this.timescale + this.editsOffset);
    const sampleIndex = this.displayOrderedSamples.findIndex(
      (sample) => sample.cts >= targetTime
    );
    if (sampleIndex === -1) {
      return this.displayOrderedSamples[this.displayOrderedSamples.length - 1];
    }
    return this.displayOrderedSamples[sampleIndex];
  }
  // True when seeking to `seconds` would surface a frame different from the
  // one produced by the previous seek.
  seekingWillEmitNewFrame(seconds) {
    if (!this.lastSoughtFrame) {
      return true;
    }
    if (this.seekingWillGoBackwards(seconds)) {
      return true;
    }
    const nextCts = this.getSampleClosetToTime(seconds).cts;
    return nextCts > this.lastSoughtFrame.timestamp;
  }
  // True when decoding from the current cursor up to the seek target would
  // cross multiple sync (key) frames — i.e. whole GOPs could be skipped
  // instead of decoded.
  seekingWillSkipPictureGroup(seconds) {
    let start = this.sampleCursor;
    const end = this.getSampleClosetToTime(seconds).number;
    let syncFrameCrossings = 0;
    while (start <= end) {
      const sample = this.getSample(start);
      if (sample.is_sync) {
        if (syncFrameCrossings > 1) {
          return true;
        }
        syncFrameCrossings++;
      }
      start++;
    }
    return false;
  }
  // True when the seek target lies before frames already emitted (and is not
  // in the cache), which forces a decoder flush and a restart from a sync
  // sample.
  seekingWillGoBackwards(seconds) {
    const targetSample = this.getSampleClosetToTime(seconds);
    const targetIndex = this.displayOrderedSamples.indexOf(targetSample);
    const targetInCache = this.decodedFrames.find(
      (frame) => frame.timestamp === targetSample.cts
    );
    const atEnd = this.sampleCursor === this.samples.length - 1;
    if (atEnd) {
      return false;
    }
    if (targetInCache) {
      return false;
    }
    return this.outCursor > targetIndex;
  }
  /**
   * Seeks to the sample nearest `seconds` and resolves with its decoded
   * VideoFrame, served from the cache when possible. May flush and reposition
   * the decoder when the target is behind the emitted frames or far enough
   * ahead to skip whole GOPs. Resolves with undefined when no matching frame
   * is ever emitted.
   */
  async seekToTime(seconds) {
    const sample = this.getSampleClosetToTime(seconds);
    const cts = sample.cts;
    this.latestSeekCts = cts;
    const alreadyDecodedFrame = this.decodedFrames.find(
      (f) => f.timestamp === cts
    );
    if (alreadyDecodedFrame) {
      return alreadyDecodedFrame;
    }
    if (this.seekingWillSkipPictureGroup(seconds)) {
      // Jumping forward: flush, then restart from the sync sample at or
      // before the target instead of decoding every intervening GOP.
      await this.videoDecoder.flush();
      let syncSampleNumber = sample.number;
      while (!this.getSample(syncSampleNumber).is_sync) {
        syncSampleNumber--;
      }
      this.sampleCursor = syncSampleNumber;
    }
    if (this.seekingWillGoBackwards(seconds)) {
      console.log("BACKWARDS FLUSH");
      // Rewinding: cached frames are all ahead of the target, so drop them
      // and restart from the preceding sync sample.
      await this.videoDecoder.flush();
      this.decodedFrames.forEach((frame2) => {
        frame2.close();
      });
      this.decodedFrames = [];
      let syncSampleNumber = sample.number;
      while (!this.getSample(syncSampleNumber).is_sync) {
        syncSampleNumber--;
      }
      this.sampleCursor = syncSampleNumber;
    }
    let frame;
    // One-shot listener capturing the frame whose timestamp matches the
    // seek target.
    const maybeFrame = (_frame) => {
      if (frame) {
        return;
      }
      if (_frame.timestamp === cts) {
        this.removeEventListener("frame", maybeFrame);
        frame = _frame;
      }
    };
    this.addEventListener("frame", maybeFrame);
    while (frame === void 0 && this.canDecodeNextSample) {
      await this.decodeNextSample();
    }
    this.removeEventListener("frame", maybeFrame);
    if (frame) {
      this.lastSoughtFrame = frame;
    }
    return frame;
  }
  // First video track from the parsed container metadata (may be undefined).
  get defaultVideoTrack() {
    return this.fileInfo.videoTracks[0];
  }
  // Raw mp4box `trak` box for the default video track ("Trak" follows the
  // ISO-BMFF box name).
  get defaultVideoTrak() {
    return this.mp4boxFile.getTrackById(this.defaultVideoTrack?.id ?? -1);
  }
  // Whole-file duration in seconds (movie duration / movie timescale).
  get duration() {
    return this.fileInfo.duration / this.fileInfo.timescale;
  }
};
|
|
423
|
+
// Memoize the derived/expensive VideoAsset getters: each one walks or sorts
// sample tables (or the mp4box box tree), so compute once per instance.
__decorateClass([
  memoize
], _VideoAsset.prototype, "editsOffset", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "timescale", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "samples", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "displayOrderedSamples", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "defaultVideoTrack", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "defaultVideoTrak", 1);
__decorateClass([
  memoize
], _VideoAsset.prototype, "duration", 1);
// Public binding for the class (the `_VideoAsset`/`_VideoAsset2` indirection
// preserves the class name through the esbuild transform).
let VideoAsset = _VideoAsset;
|
|
445
|
+
/**
 * An audio ISOFileAsset exposing the default audio track's codec, sample
 * rate, and channel count from the parsed container metadata.
 */
const _AudioAsset = class _AudioAsset2 extends ISOFileAsset {
  /**
   * Streams `file`'s bytes into an mp4box parser and resolves to an
   * AudioAsset once the whole file has been appended.
   *
   * @param {string} id - Local name for the asset.
   * @param {ReadableStream<Uint8Array>} stream - Byte stream of `file`.
   * @param {File} file - Backing file (used for total size and byte access).
   * @returns {Promise<_AudioAsset2>} The parsed audio asset.
   */
  static async createFromReadableStream(id, stream, file) {
    let fileStart = 0;
    const inputFile = new MP4File();
    const reader = stream.getReader();
    const processChunk = ({
      done,
      value
    }) => {
      if (done) {
        return;
      }
      if (!value) {
        return;
      }
      // mp4box requires each ArrayBuffer to carry its absolute file offset.
      const mp4buffer = value.buffer;
      mp4buffer.fileStart = fileStart;
      // Fix: flag the final append (mirrors VideoAsset.createFromReadableStream).
      // Without the `last` flag mp4box never flushes its trailing samples and
      // MP4File.processSamples is never invoked with last === true, so
      // consumers blocked in waitForMoreSamples() can hang forever.
      const isLast = file.size === fileStart + value.byteLength;
      fileStart += value.byteLength;
      inputFile.appendBuffer(mp4buffer, isLast);
      return reader.read().then(processChunk);
    };
    await reader.read().then(processChunk);
    return new _AudioAsset2(id, file, inputFile);
  }
  // First audio track from the parsed container metadata (may be undefined).
  get defaultAudioTrack() {
    return this.fileInfo.audioTracks[0];
  }
  // Raw mp4box `trak` box for the default audio track ("Trak" follows the
  // ISO-BMFF box name).
  get defaultAudioTrak() {
    return this.mp4boxFile.getTrackById(this.defaultAudioTrack?.id ?? -1);
  }
  // Codec string (e.g. "mp4a.40.2"); throws when the file has no audio track.
  get audioCodec() {
    if (!this.defaultAudioTrack) {
      throw new Error("No default audio track found");
    }
    return this.defaultAudioTrack.codec;
  }
  // Sample rate in Hz; throws when the file has no audio track.
  get samplerate() {
    if (!this.defaultAudioTrack) {
      throw new Error("No default audio track found");
    }
    return this.defaultAudioTrack.audio.sample_rate;
  }
  // Number of audio channels; throws when the file has no audio track.
  get channelCount() {
    if (!this.defaultAudioTrack) {
      throw new Error("No default audio track found");
    }
    return this.defaultAudioTrack.audio.channel_count;
  }
};
|
|
494
|
+
// Memoize the AudioAsset track/metadata getters so the mp4box info is only
// traversed once per instance.
__decorateClass([
  memoize
], _AudioAsset.prototype, "defaultAudioTrack", 1);
__decorateClass([
  memoize
], _AudioAsset.prototype, "defaultAudioTrak", 1);
__decorateClass([
  memoize
], _AudioAsset.prototype, "audioCodec", 1);
__decorateClass([
  memoize
], _AudioAsset.prototype, "samplerate", 1);
__decorateClass([
  memoize
], _AudioAsset.prototype, "channelCount", 1);
|
|
509
|
+
/**
 * A still-image asset backed by a local file (png, jpg, ...).
 */
const _ImageAsset = class _ImageAsset2 extends FileAsset {
  /**
   * Wraps `file` as an image asset.
   *
   * @param {string} id - Local name for the asset.
   * @param {File} file - Backing image file.
   * @returns {Promise<_ImageAsset2>}
   * @throws {AssetNotAvailableLocally} When the file has no local bytes.
   */
  static async createFromReadableStream(id, file) {
    if (file.size !== 0) {
      return new _ImageAsset2(id, file);
    }
    // A zero-byte file is a placeholder whose bytes live elsewhere.
    throw new AssetNotAvailableLocally();
  }
  // Blob URL for the backing file; memoized below the class so only one URL
  // is minted per asset (it is never revoked here).
  get objectUrl() {
    return URL.createObjectURL(this.file);
  }
  // Image format inferred from the file extension (e.g. "png").
  get format() {
    return this.fileExtension;
  }
  // MIME-style type string derived from the format.
  get type() {
    return `image/${this.format}`;
  }
};
|
|
526
|
+
// Memoize objectUrl (so exactly one blob URL is created per asset) and the
// extension-derived format string.
__decorateClass([
  memoize
], _ImageAsset.prototype, "objectUrl", 1);
__decorateClass([
  memoize
], _ImageAsset.prototype, "format", 1);
// NOTE(review): _AudioAsset and _ImageAsset are defined above but not
// exported here — confirm whether that is intentional in this build output.
export {
  AssetNotAvailableLocally,
  FileAsset,
  ISOFileAsset,
  VideoAsset
};
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import * as MP4Box from "mp4box";
|
|
2
|
+
/**
 * Extension of mp4box's ISOFile adding promise-based readiness
 * (`readyPromise`), an async fragmentation pipeline (`fragmentIterator` /
 * `fragmentAllTracks`), and back-pressure hooks so the fragmenter can await
 * the arrival of more samples while bytes are still being appended.
 */
class MP4File extends MP4Box.ISOFile {
  constructor() {
    super(...arguments);
    // Resolves on mp4box's onReady (moov parsed); rejects on onError.
    this.readyPromise = new Promise((resolve, reject) => {
      this.onReady = () => resolve();
      this.onError = reject;
    });
    // Resolvers parked by waitForMoreSamples(), released by processSamples().
    this.waitingForSamples = [];
    // Set once processSamples() has been called with last === true.
    this._hasSeenLastSamples = false;
    this._arrayBufferFileStart = 0;
  }
  /**
   * Registers a track for fragmentation (overrides the mp4box base method).
   *
   * @param {number} id - Track id.
   * @param {*} user - Opaque user data kept on the fragmented-track record.
   * @param {{ nbSamples?: number, rapAlignement?: boolean }} options -
   *   Samples per fragment (default 1000) and whether fragments must start
   *   at random-access points (default true).
   */
  setSegmentOptions(id, user, options) {
    const trak = this.getTrackById(id);
    if (trak) {
      // Start fragmenting this track from its first sample.
      trak.nextSample = 0;
      this.fragmentedTracks.push({
        id,
        user,
        trak,
        segmentStream: null,
        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
      });
    }
  }
  /**
   * Fragments all tracks in a file into separate array buffers.
   */
  async fragmentAllTracks() {
    let trackBuffers = {};
    for await (const segment of this.fragmentIterator()) {
      // Group segment buffers by track id, creating the bucket on first use.
      (trackBuffers[segment.track] ??= []).push(segment.data);
    }
    return trackBuffers;
  }
  /**
   * Async generator yielding, per track, an "init" segment followed by media
   * segments. Video tracks are fragmented at sync-sample (GOP) boundaries;
   * audio tracks in roughly four-second batches (assuming 1024-sample
   * packets). Each media segment carries cts/dts/duration in track timescale
   * units and a `complete` flag on the track's final segment.
   */
  async *fragmentIterator() {
    // Wait for the moov box so track info is available.
    await this.readyPromise;
    // Per-track segment counter and completion flag, keyed by track id.
    const trackInfo = {};
    for (const videoTrack of this.getInfo().videoTracks) {
      trackInfo[videoTrack.id] = { index: 0, complete: false };
      this.setSegmentOptions(videoTrack.id, null, {
        rapAlignement: true
      });
    }
    for (const audioTrack of this.getInfo().audioTracks) {
      trackInfo[audioTrack.id] = { index: 0, complete: false };
      // Estimate how many samples make up ~4 seconds of audio, assuming the
      // common AAC packet size of 1024 PCM samples per encoded sample.
      const sampleRate = audioTrack.audio.sample_rate;
      const probablePacketSize = 1024;
      const probableFourSecondsOfSamples = Math.ceil(
        sampleRate / probablePacketSize * 4
      );
      this.setSegmentOptions(audioTrack.id, null, {
        nbSamples: probableFourSecondsOfSamples
      });
    }
    // Emit each track's initialization segment first.
    const initSegments = this.initializeSegmentation();
    for (const initSegment of initSegments) {
      yield {
        track: initSegment.id,
        segment: "init",
        data: initSegment.buffer,
        complete: false
      };
    }
    // First sample of the in-progress fragment, keyed by track id.
    const fragmentStartSamples = {};
    let finishedReading = false;
    const allTracksFinished = () => {
      for (const fragmentedTrack of this.fragmentedTracks) {
        if (!trackInfo[fragmentedTrack.id]?.complete) {
          return false;
        }
      }
      return true;
    };
    // Keep cycling over the tracks until the source has been fully read AND
    // every track has emitted its final fragment.
    while (!(finishedReading && allTracksFinished())) {
      for (const fragTrak of this.fragmentedTracks) {
        const trak = fragTrak.trak;
        if (trak.nextSample === void 0) {
          throw new Error("trak.nextSample is undefined");
        }
        if (trak.samples === void 0) {
          throw new Error("trak.samples is undefined");
        }
        eachSample:
          while (trak.nextSample < trak.samples.length) {
            let result = void 0;
            // Remember the first sample of the fragment being accumulated.
            if (trak?.samples[trak.nextSample]) {
              fragmentStartSamples[fragTrak.id] ||= trak.samples[trak.nextSample];
            }
            try {
              result = this.createFragment(
                fragTrak.id,
                trak.nextSample,
                fragTrak.segmentStream
              );
            } catch (error) {
              console.log("Failed to createFragment", error);
            }
            if (result) {
              fragTrak.segmentStream = result;
              trak.nextSample++;
            } else {
              // The sample's bytes are not appended yet — park until more
              // data arrives, then retry this track from the same sample.
              finishedReading = await this.waitForMoreSamples();
              break eachSample;
            }
            const nextSample = trak.samples[trak.nextSample];
            const emitSegment = (
              // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
              fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
              !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
              // finished ||
              // if we have more samples than the number of samples requested, we emit the fragment
              trak.nextSample >= trak.samples.length
            );
            if (emitSegment) {
              if (trak.nextSample >= trak.samples.length) {
                trackInfo[fragTrak.id].complete = true;
              }
              const startSample = fragmentStartSamples[fragTrak.id];
              const endSample = trak.samples[trak.nextSample - 1];
              if (!startSample || !endSample) {
                throw new Error("startSample or endSample is undefined");
              }
              yield {
                track: fragTrak.id,
                segment: trackInfo[fragTrak.id].index,
                data: fragTrak.segmentStream.buffer,
                complete: trackInfo[fragTrak.id].complete,
                cts: startSample.cts,
                dts: startSample.dts,
                // Fragment span in track timescale units, including the
                // final sample's own duration.
                duration: endSample.dts - startSample.dts + endSample.duration
              };
              trackInfo[fragTrak.id].index += 1;
              // Reset the accumulators for this track's next fragment.
              fragTrak.segmentStream = null;
              delete fragmentStartSamples[fragTrak.id];
            }
          }
      }
      finishedReading = await this.waitForMoreSamples();
    }
  }
  // Resolves with true once the final samples have been seen; otherwise
  // parks until the next processSamples() call and resolves with its
  // `last` flag.
  waitForMoreSamples() {
    if (this._hasSeenLastSamples) {
      return Promise.resolve(true);
    }
    return new Promise((resolve) => {
      this.waitingForSamples.push(resolve);
    });
  }
  // mp4box callback invoked when new samples become available; wakes every
  // parked waitForMoreSamples() caller with the `last` flag.
  processSamples(last) {
    this._hasSeenLastSamples = last;
    for (const observer of this.waitingForSamples) {
      observer(last);
    }
    this.waitingForSamples = [];
  }
}
export {
  MP4File
};
|