@editframe/elements 0.6.0-beta.19 → 0.6.0-beta.22
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/lib/av/EncodedAsset.cjs +570 -0
- package/dist/lib/av/EncodedAsset.js +553 -0
- package/dist/lib/av/MP4File.cjs +182 -0
- package/dist/lib/av/MP4File.js +165 -0
- package/dist/lib/av/msToTimeCode.cjs +15 -0
- package/dist/lib/av/msToTimeCode.js +15 -0
- package/dist/lib/util/awaitMicrotask.cjs +4 -0
- package/dist/lib/util/awaitMicrotask.js +4 -0
- package/dist/lib/util/memoize.cjs +14 -0
- package/dist/lib/util/memoize.js +14 -0
- package/dist/packages/elements/src/EF_FRAMEGEN.cjs +200 -0
- package/dist/packages/elements/src/EF_FRAMEGEN.d.ts +45 -0
- package/dist/packages/elements/src/EF_FRAMEGEN.js +200 -0
- package/dist/packages/elements/src/EF_INTERACTIVE.cjs +4 -0
- package/dist/packages/elements/src/EF_INTERACTIVE.d.ts +1 -0
- package/dist/packages/elements/src/EF_INTERACTIVE.js +4 -0
- package/dist/packages/elements/src/elements/CrossUpdateController.cjs +16 -0
- package/dist/packages/elements/src/elements/CrossUpdateController.d.ts +9 -0
- package/dist/packages/elements/src/elements/CrossUpdateController.js +16 -0
- package/dist/packages/elements/src/elements/EFAudio.cjs +53 -0
- package/dist/packages/elements/src/elements/EFAudio.d.ts +10 -0
- package/dist/packages/elements/src/elements/EFAudio.js +54 -0
- package/dist/packages/elements/src/elements/EFCaptions.cjs +164 -0
- package/dist/packages/elements/src/elements/EFCaptions.d.ts +38 -0
- package/dist/packages/elements/src/elements/EFCaptions.js +166 -0
- package/dist/packages/elements/src/elements/EFImage.cjs +79 -0
- package/dist/packages/elements/src/elements/EFImage.d.ts +14 -0
- package/dist/packages/elements/src/elements/EFImage.js +80 -0
- package/dist/packages/elements/src/elements/EFMedia.cjs +334 -0
- package/dist/packages/elements/src/elements/EFMedia.d.ts +61 -0
- package/dist/packages/elements/src/elements/EFMedia.js +334 -0
- package/dist/packages/elements/src/elements/EFSourceMixin.cjs +55 -0
- package/dist/packages/elements/src/elements/EFSourceMixin.d.ts +12 -0
- package/dist/packages/elements/src/elements/EFSourceMixin.js +55 -0
- package/dist/packages/elements/src/elements/EFTemporal.cjs +198 -0
- package/dist/packages/elements/src/elements/EFTemporal.d.ts +36 -0
- package/dist/packages/elements/src/elements/EFTemporal.js +198 -0
- package/dist/packages/elements/src/elements/EFTimegroup.browsertest.d.ts +12 -0
- package/dist/packages/elements/src/elements/EFTimegroup.cjs +350 -0
- package/dist/packages/elements/src/elements/EFTimegroup.d.ts +39 -0
- package/dist/packages/elements/src/elements/EFTimegroup.js +351 -0
- package/dist/packages/elements/src/elements/EFTimeline.cjs +15 -0
- package/dist/packages/elements/src/elements/EFTimeline.d.ts +3 -0
- package/dist/packages/elements/src/elements/EFTimeline.js +15 -0
- package/dist/packages/elements/src/elements/EFVideo.cjs +109 -0
- package/dist/packages/elements/src/elements/EFVideo.d.ts +14 -0
- package/dist/packages/elements/src/elements/EFVideo.js +110 -0
- package/dist/packages/elements/src/elements/EFWaveform.cjs +235 -0
- package/dist/packages/elements/src/elements/EFWaveform.d.ts +28 -0
- package/dist/packages/elements/src/elements/EFWaveform.js +219 -0
- package/dist/packages/elements/src/elements/FetchMixin.cjs +28 -0
- package/dist/packages/elements/src/elements/FetchMixin.d.ts +8 -0
- package/dist/packages/elements/src/elements/FetchMixin.js +28 -0
- package/dist/packages/elements/src/elements/TimegroupController.cjs +20 -0
- package/dist/packages/elements/src/elements/TimegroupController.d.ts +14 -0
- package/dist/packages/elements/src/elements/TimegroupController.js +20 -0
- package/dist/packages/elements/src/elements/durationConverter.cjs +8 -0
- package/dist/packages/elements/src/elements/durationConverter.d.ts +4 -0
- package/dist/packages/elements/src/elements/durationConverter.js +8 -0
- package/dist/packages/elements/src/elements/parseTimeToMs.cjs +12 -0
- package/dist/packages/elements/src/elements/parseTimeToMs.d.ts +1 -0
- package/dist/packages/elements/src/elements/parseTimeToMs.js +12 -0
- package/dist/packages/elements/src/elements/util.cjs +11 -0
- package/dist/packages/elements/src/elements/util.d.ts +4 -0
- package/dist/packages/elements/src/elements/util.js +11 -0
- package/dist/packages/elements/src/gui/EFFilmstrip.cjs +820 -0
- package/dist/packages/elements/src/gui/EFFilmstrip.d.ts +147 -0
- package/dist/packages/elements/src/gui/EFFilmstrip.js +828 -0
- package/dist/packages/elements/src/gui/EFWorkbench.cjs +213 -0
- package/dist/packages/elements/src/gui/EFWorkbench.d.ts +45 -0
- package/dist/packages/elements/src/gui/EFWorkbench.js +214 -0
- package/dist/packages/elements/src/gui/TWMixin.cjs +28 -0
- package/dist/packages/elements/src/gui/TWMixin.css.cjs +3 -0
- package/dist/packages/elements/src/gui/TWMixin.css.js +4 -0
- package/dist/packages/elements/src/gui/TWMixin.d.ts +3 -0
- package/dist/packages/elements/src/gui/TWMixin.js +28 -0
- package/dist/packages/elements/src/index.cjs +51 -0
- package/dist/packages/elements/src/index.d.ts +10 -0
- package/dist/packages/elements/src/index.js +24 -0
- package/dist/style.css +787 -0
- package/package.json +2 -2
- package/src/gui/EFFilmstrip.ts +3 -3

package/dist/lib/av/MP4File.cjs
@@ -0,0 +1,182 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const MP4Box = require("mp4box");
+function _interopNamespaceDefault(e) {
+  const n = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
+  if (e) {
+    for (const k in e) {
+      if (k !== "default") {
+        const d = Object.getOwnPropertyDescriptor(e, k);
+        Object.defineProperty(n, k, d.get ? d : {
+          enumerable: true,
+          get: () => e[k]
+        });
+      }
+    }
+  }
+  n.default = e;
+  return Object.freeze(n);
+}
+const MP4Box__namespace = /* @__PURE__ */ _interopNamespaceDefault(MP4Box);
+class MP4File extends MP4Box__namespace.ISOFile {
+  constructor() {
+    super(...arguments);
+    this.readyPromise = new Promise((resolve, reject) => {
+      this.onReady = () => resolve();
+      this.onError = reject;
+    });
+    this.waitingForSamples = [];
+    this._hasSeenLastSamples = false;
+    this._arrayBufferFileStart = 0;
+  }
+  setSegmentOptions(id, user, options) {
+    const trak = this.getTrackById(id);
+    if (trak) {
+      trak.nextSample = 0;
+      this.fragmentedTracks.push({
+        id,
+        user,
+        trak,
+        segmentStream: null,
+        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
+        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
+      });
+    }
+  }
+  /**
+   * Fragments all tracks in a file into separate array buffers.
+   */
+  async fragmentAllTracks() {
+    const trackBuffers = {};
+    for await (const segment of this.fragmentIterator()) {
+      (trackBuffers[segment.track] ??= []).push(segment.data);
+    }
+    return trackBuffers;
+  }
+  async *fragmentIterator() {
+    await this.readyPromise;
+    const trackInfo = {};
+    for (const videoTrack of this.getInfo().videoTracks) {
+      trackInfo[videoTrack.id] = { index: 0, complete: false };
+      this.setSegmentOptions(videoTrack.id, null, {
+        rapAlignement: true
+      });
+    }
+    for (const audioTrack of this.getInfo().audioTracks) {
+      trackInfo[audioTrack.id] = { index: 0, complete: false };
+      const sampleRate = audioTrack.audio.sample_rate;
+      const probablePacketSize = 1024;
+      const probableFourSecondsOfSamples = Math.ceil(
+        sampleRate / probablePacketSize * 4
+      );
+      this.setSegmentOptions(audioTrack.id, null, {
+        nbSamples: probableFourSecondsOfSamples
+      });
+    }
+    const initSegments = this.initializeSegmentation();
+    for (const initSegment of initSegments) {
+      yield {
+        track: initSegment.id,
+        segment: "init",
+        data: initSegment.buffer,
+        complete: false
+      };
+    }
+    const fragmentStartSamples = {};
+    let finishedReading = false;
+    const allTracksFinished = () => {
+      for (const fragmentedTrack of this.fragmentedTracks) {
+        if (!trackInfo[fragmentedTrack.id]?.complete) {
+          return false;
+        }
+      }
+      return true;
+    };
+    while (!(finishedReading && allTracksFinished())) {
+      for (const fragTrak of this.fragmentedTracks) {
+        const trak = fragTrak.trak;
+        if (trak.nextSample === void 0) {
+          throw new Error("trak.nextSample is undefined");
+        }
+        if (trak.samples === void 0) {
+          throw new Error("trak.samples is undefined");
+        }
+        while (trak.nextSample < trak.samples.length) {
+          let result = void 0;
+          const fragTrakNextSample = trak.samples[trak.nextSample];
+          if (fragTrakNextSample) {
+            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
+          }
+          try {
+            result = this.createFragment(
+              fragTrak.id,
+              trak.nextSample,
+              fragTrak.segmentStream
+            );
+          } catch (error) {
+            console.log("Failed to createFragment", error);
+          }
+          if (result) {
+            fragTrak.segmentStream = result;
+            trak.nextSample++;
+          } else {
+            finishedReading = await this.waitForMoreSamples();
+            break;
+          }
+          const nextSample = trak.samples[trak.nextSample];
+          const emitSegment = (
+            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
+            // finished ||
+            // if we have more samples than the number of samples requested, we emit the fragment
+            trak.nextSample >= trak.samples.length
+          );
+          if (emitSegment) {
+            const trackInfoForFrag = trackInfo[fragTrak.id];
+            if (!trackInfoForFrag) {
+              throw new Error("trackInfoForFrag is undefined");
+            }
+            if (trak.nextSample >= trak.samples.length) {
+              trackInfoForFrag.complete = true;
+            }
+            const startSample = fragmentStartSamples[fragTrak.id];
+            const endSample = trak.samples[trak.nextSample - 1];
+            if (!startSample || !endSample) {
+              throw new Error("startSample or endSample is undefined");
+            }
+            yield {
+              track: fragTrak.id,
+              segment: trackInfoForFrag.index,
+              data: fragTrak.segmentStream.buffer,
+              complete: trackInfoForFrag.complete,
+              cts: startSample.cts,
+              dts: startSample.dts,
+              duration: endSample.cts - startSample.cts + endSample.duration
+            };
+            trackInfoForFrag.index += 1;
+            fragTrak.segmentStream = null;
+            delete fragmentStartSamples[fragTrak.id];
+          }
+        }
+      }
+      finishedReading = await this.waitForMoreSamples();
+    }
+  }
+  waitForMoreSamples() {
+    if (this._hasSeenLastSamples) {
+      return Promise.resolve(true);
+    }
+    return new Promise((resolve) => {
+      this.waitingForSamples.push(resolve);
+    });
+  }
+  processSamples(last) {
+    this._hasSeenLastSamples = last;
+    for (const observer of this.waitingForSamples) {
+      observer(last);
+    }
+    this.waitingForSamples = [];
+  }
+}
+exports.MP4File = MP4File;

package/dist/lib/av/MP4File.js
@@ -0,0 +1,165 @@
+import * as MP4Box from "mp4box";
+class MP4File extends MP4Box.ISOFile {
+  constructor() {
+    super(...arguments);
+    this.readyPromise = new Promise((resolve, reject) => {
+      this.onReady = () => resolve();
+      this.onError = reject;
+    });
+    this.waitingForSamples = [];
+    this._hasSeenLastSamples = false;
+    this._arrayBufferFileStart = 0;
+  }
+  setSegmentOptions(id, user, options) {
+    const trak = this.getTrackById(id);
+    if (trak) {
+      trak.nextSample = 0;
+      this.fragmentedTracks.push({
+        id,
+        user,
+        trak,
+        segmentStream: null,
+        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
+        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
+      });
+    }
+  }
+  /**
+   * Fragments all tracks in a file into separate array buffers.
+   */
+  async fragmentAllTracks() {
+    const trackBuffers = {};
+    for await (const segment of this.fragmentIterator()) {
+      (trackBuffers[segment.track] ??= []).push(segment.data);
+    }
+    return trackBuffers;
+  }
+  async *fragmentIterator() {
+    await this.readyPromise;
+    const trackInfo = {};
+    for (const videoTrack of this.getInfo().videoTracks) {
+      trackInfo[videoTrack.id] = { index: 0, complete: false };
+      this.setSegmentOptions(videoTrack.id, null, {
+        rapAlignement: true
+      });
+    }
+    for (const audioTrack of this.getInfo().audioTracks) {
+      trackInfo[audioTrack.id] = { index: 0, complete: false };
+      const sampleRate = audioTrack.audio.sample_rate;
+      const probablePacketSize = 1024;
+      const probableFourSecondsOfSamples = Math.ceil(
+        sampleRate / probablePacketSize * 4
+      );
+      this.setSegmentOptions(audioTrack.id, null, {
+        nbSamples: probableFourSecondsOfSamples
+      });
+    }
+    const initSegments = this.initializeSegmentation();
+    for (const initSegment of initSegments) {
+      yield {
+        track: initSegment.id,
+        segment: "init",
+        data: initSegment.buffer,
+        complete: false
+      };
+    }
+    const fragmentStartSamples = {};
+    let finishedReading = false;
+    const allTracksFinished = () => {
+      for (const fragmentedTrack of this.fragmentedTracks) {
+        if (!trackInfo[fragmentedTrack.id]?.complete) {
+          return false;
+        }
+      }
+      return true;
+    };
+    while (!(finishedReading && allTracksFinished())) {
+      for (const fragTrak of this.fragmentedTracks) {
+        const trak = fragTrak.trak;
+        if (trak.nextSample === void 0) {
+          throw new Error("trak.nextSample is undefined");
+        }
+        if (trak.samples === void 0) {
+          throw new Error("trak.samples is undefined");
+        }
+        while (trak.nextSample < trak.samples.length) {
+          let result = void 0;
+          const fragTrakNextSample = trak.samples[trak.nextSample];
+          if (fragTrakNextSample) {
+            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
+          }
+          try {
+            result = this.createFragment(
+              fragTrak.id,
+              trak.nextSample,
+              fragTrak.segmentStream
+            );
+          } catch (error) {
+            console.log("Failed to createFragment", error);
+          }
+          if (result) {
+            fragTrak.segmentStream = result;
+            trak.nextSample++;
+          } else {
+            finishedReading = await this.waitForMoreSamples();
+            break;
+          }
+          const nextSample = trak.samples[trak.nextSample];
+          const emitSegment = (
+            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
+            // finished ||
+            // if we have more samples than the number of samples requested, we emit the fragment
+            trak.nextSample >= trak.samples.length
+          );
+          if (emitSegment) {
+            const trackInfoForFrag = trackInfo[fragTrak.id];
+            if (!trackInfoForFrag) {
+              throw new Error("trackInfoForFrag is undefined");
+            }
+            if (trak.nextSample >= trak.samples.length) {
+              trackInfoForFrag.complete = true;
+            }
+            const startSample = fragmentStartSamples[fragTrak.id];
+            const endSample = trak.samples[trak.nextSample - 1];
+            if (!startSample || !endSample) {
+              throw new Error("startSample or endSample is undefined");
+            }
+            yield {
+              track: fragTrak.id,
+              segment: trackInfoForFrag.index,
+              data: fragTrak.segmentStream.buffer,
+              complete: trackInfoForFrag.complete,
+              cts: startSample.cts,
+              dts: startSample.dts,
+              duration: endSample.cts - startSample.cts + endSample.duration
+            };
+            trackInfoForFrag.index += 1;
+            fragTrak.segmentStream = null;
+            delete fragmentStartSamples[fragTrak.id];
+          }
+        }
+      }
+      finishedReading = await this.waitForMoreSamples();
+    }
+  }
+  waitForMoreSamples() {
+    if (this._hasSeenLastSamples) {
+      return Promise.resolve(true);
+    }
+    return new Promise((resolve) => {
+      this.waitingForSamples.push(resolve);
+    });
+  }
+  processSamples(last) {
+    this._hasSeenLastSamples = last;
+    for (const observer of this.waitingForSamples) {
+      observer(last);
+    }
+    this.waitingForSamples = [];
+  }
+}
+export {
+  MP4File
+};
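
A minimal sketch of how MP4File might be driven, assuming mp4box's usual ISOFile contract (each appended ArrayBuffer carries a fileStart byte offset, and flush() marks the end of input); the import path and fetch source below are illustrative, not taken from the package:

    import { MP4File } from "@editframe/elements/dist/lib/av/MP4File.js"; // hypothetical path

    const file = new MP4File();
    const bytes = (await (await fetch("video.mp4")).arrayBuffer()) as ArrayBuffer & { fileStart: number };
    bytes.fileStart = 0; // mp4box tracks the byte offset of each appended buffer
    file.appendBuffer(bytes);
    file.flush(); // no more data is coming

    // Init segments are yielded first (segment: "init"), then media fragments,
    // with cts/dts/duration expressed in the track's timescale.
    for await (const segment of file.fragmentIterator()) {
      console.log(segment.track, segment.segment, segment.data.byteLength);
    }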

package/dist/lib/av/msToTimeCode.cjs
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const msToTimeCode = (ms, subSecond = false) => {
+  const seconds = Math.floor(ms / 1e3);
+  const minutes = Math.floor(seconds / 60);
+  const hours = Math.floor(minutes / 60);
+  const pad = (num) => num.toString().padStart(2, "0");
+  let timecode = `${pad(hours)}:${pad(minutes % 60)}:${pad(seconds % 60)}`;
+  if (subSecond) {
+    const subSeconds = Math.floor(ms % 1e3 / 10);
+    timecode += `.${subSeconds.toString().padStart(2, "0")}`;
+  }
+  return timecode;
+};
+exports.msToTimeCode = msToTimeCode;

package/dist/lib/av/msToTimeCode.js
@@ -0,0 +1,15 @@
+const msToTimeCode = (ms, subSecond = false) => {
+  const seconds = Math.floor(ms / 1e3);
+  const minutes = Math.floor(seconds / 60);
+  const hours = Math.floor(minutes / 60);
+  const pad = (num) => num.toString().padStart(2, "0");
+  let timecode = `${pad(hours)}:${pad(minutes % 60)}:${pad(seconds % 60)}`;
+  if (subSecond) {
+    const subSeconds = Math.floor(ms % 1e3 / 10);
+    timecode += `.${subSeconds.toString().padStart(2, "0")}`;
+  }
+  return timecode;
+};
+export {
+  msToTimeCode
+};
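
For reference, the helper truncates to whole seconds and, with subSecond set, appends hundredths of a second:

    msToTimeCode(61230);        // "00:01:01"
    msToTimeCode(61230, true);  // "00:01:01.23"
    msToTimeCode(3661000);      // "01:01:01"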

package/dist/lib/util/memoize.cjs
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const memoize = (_target, _propertyKey, descriptor) => {
+  const get = descriptor.get;
+  if (!get) return;
+  const memoized = /* @__PURE__ */ new WeakMap();
+  descriptor.get = function() {
+    if (!memoized.has(this)) {
+      memoized.set(this, get.call(this));
+    }
+    return memoized.get(this);
+  };
+};
+exports.memoize = memoize;

package/dist/lib/util/memoize.js
@@ -0,0 +1,14 @@
+const memoize = (_target, _propertyKey, descriptor) => {
+  const get = descriptor.get;
+  if (!get) return;
+  const memoized = /* @__PURE__ */ new WeakMap();
+  descriptor.get = function() {
+    if (!memoized.has(this)) {
+      memoized.set(this, get.call(this));
+    }
+    return memoized.get(this);
+  };
+};
+export {
+  memoize
+};
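
memoize is a getter decorator in the legacy (experimentalDecorators) style: the first read runs the original getter and stores the result in a WeakMap keyed by the instance, so later reads skip the computation. A usage sketch; the class, getter, and import path are illustrative:

    import { memoize } from "@editframe/elements/dist/lib/util/memoize.js"; // hypothetical path

    class Clip {
      @memoize
      get durationMs(): number {
        // Runs once per Clip instance; subsequent reads hit the WeakMap cache.
        return computeDurationMs(); // hypothetical expensive computation
      }
    }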

package/dist/packages/elements/src/EF_FRAMEGEN.cjs
@@ -0,0 +1,200 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const task = require("@lit/task");
+const awaitMicrotask = require("../../../lib/util/awaitMicrotask.cjs");
+const EFTemporal = require("./elements/EFTemporal.cjs");
+const EFTimegroup = require("./elements/EFTimegroup.cjs");
+class TriggerCanvas {
+  constructor() {
+    this.canvas = document.createElement("canvas");
+    this.canvas.width = 1;
+    this.canvas.height = 1;
+    Object.assign(this.canvas.style, {
+      position: "fixed",
+      top: "0px",
+      left: "0px",
+      width: "1px",
+      height: "1px",
+      zIndex: "100000"
+    });
+    document.body.prepend(this.canvas);
+    const ctx = this.canvas.getContext("2d", { willReadFrequently: true });
+    if (!ctx) throw new Error("Canvas 2d context not ready");
+    this.ctx = ctx;
+    this.ctx.fillStyle = "black";
+  }
+  trigger() {
+    console.log("TRIGGERING CANVAS");
+    this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
+  }
+}
+class EfFramegen {
+  constructor() {
+    this.time = 0;
+    this.frameDurationMs = 0;
+    this.initialBusyTasks = Promise.resolve([]);
+    this.frameBox = document.createElement("div");
+    this.BRIDGE = window.FRAMEGEN_BRIDGE;
+    this.triggerCanvas = new TriggerCanvas();
+    if (this.BRIDGE) {
+      this.connectToBridge();
+    }
+  }
+  log(...args) {
+    console.log("[EF_FRAMEGEN]", ...args);
+  }
+  trace(...args) {
+    console.trace("[EF_FRAMEGEN]", ...args);
+  }
+  connectToBridge() {
+    const BRIDGE = this.BRIDGE;
+    if (!BRIDGE) {
+      throw new Error("No BRIDGE when attempting to connect to bridge");
+    }
+    BRIDGE.onInitialize(async (renderId, renderOptions) => {
+      this.log("BRIDGE.onInitialize", renderId, renderOptions);
+      await this.initialize(renderId, renderOptions);
+      BRIDGE.initialized(renderId);
+    });
+    BRIDGE.onBeginFrame((renderId, frameNumber, isLast) => {
+      this.log("BRIDGE.onBeginFrame", renderId, frameNumber, isLast);
+      this.beginFrame(renderId, frameNumber, isLast);
+    });
+  }
+  async initialize(renderId, renderOptions) {
+    addEventListener("unhandledrejection", (event) => {
+      this.trace("Unhandled rejection:", event.reason);
+      if (this.BRIDGE) {
+        this.BRIDGE.error(renderId, event.reason);
+      }
+    });
+    addEventListener("error", (event) => {
+      this.trace("Uncaught error", event.error);
+      if (this.BRIDGE) {
+        this.BRIDGE.error(renderId, event.error);
+      }
+    });
+    this.renderOptions = renderOptions;
+    const workbench = document.querySelector("ef-workbench");
+    if (!workbench) {
+      throw new Error("No workbench found");
+    }
+    workbench.rendering = true;
+    const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
+    const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
+    const firstGroup = timegroups[0];
+    if (!firstGroup) {
+      throw new Error("No temporal elements found");
+    }
+    firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs;
+    this.frameDurationMs = 1e3 / renderOptions.encoderOptions.video.framerate;
+    this.initialBusyTasks = Promise.all(
+      temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => temporal.frameTask).map((task2) => task2.taskComplete)
+    );
+    this.time = 0;
+    if (renderOptions.showFrameBox) {
+      Object.assign(this.frameBox.style, {
+        width: "200px",
+        height: "100px",
+        font: "30px Arial",
+        backgroundColor: "white",
+        position: "absolute",
+        top: "0px",
+        left: "0px",
+        zIndex: "100000"
+      });
+      document.body.prepend(this.frameBox);
+    }
+    this.audioBufferPromise = firstGroup.renderAudio(
+      renderOptions.encoderOptions.alignedFromUs / 1e3,
+      renderOptions.encoderOptions.alignedToUs / 1e3
+      // renderOptions.encoderOptions.fromMs,
+      // renderOptions.encoderOptions.toMs,
+    );
+    this.log("Initialized");
+  }
+  async beginFrame(renderId, frameNumber, isLast) {
+    if (this.renderOptions === void 0) {
+      throw new Error("No renderOptions");
+    }
+    if (this.renderOptions.showFrameBox) {
+      this.frameBox.innerHTML = `
+        <div>Frame #${frameNumber}</div>
+        <div>${this.time.toFixed(4)}</div>
+      `;
+    }
+    const workbench = document.querySelector("ef-workbench");
+    if (!workbench) {
+      throw new Error("No workbench found");
+    }
+    workbench.rendering = true;
+    const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
+    const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
+    const firstGroup = timegroups[0];
+    if (!firstGroup) {
+      throw new Error("No temporal elements found");
+    }
+    this.time = this.renderOptions.encoderOptions.fromMs + frameNumber * this.frameDurationMs;
+    firstGroup.currentTimeMs = this.time;
+    console.log("Awaiting initialBusyTasks");
+    await this.initialBusyTasks;
+    console.log("Awaiting microtask");
+    await awaitMicrotask.awaitMicrotask();
+    console.log("Awaiting frame tasks");
+    const now = performance.now();
+    await Promise.all(
+      temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => {
+        return temporal.frameTask;
+      }).map((task2) => task2.taskComplete)
+    );
+    console.log(
+      `frame:${frameNumber} All tasks complete ${performance.now() - now}ms`
+    );
+    if (isLast && this.audioBufferPromise) {
+      const renderedAudio = await this.audioBufferPromise;
+      const channelCount = renderedAudio.numberOfChannels;
+      const interleavedSamples = new Float32Array(
+        channelCount * renderedAudio.length
+      );
+      for (let i = 0; i < renderedAudio.length; i++) {
+        for (let j = 0; j < channelCount; j++) {
+          interleavedSamples.set(
+            renderedAudio.getChannelData(j).slice(i, i + 1),
+            i * channelCount + j
+          );
+        }
+      }
+      if (this.BRIDGE) {
+        this.triggerCanvas.trigger();
+        this.BRIDGE.frameReady(
+          renderId,
+          frameNumber,
+          interleavedSamples.buffer
+        );
+      } else {
+        const fileReader = new FileReader();
+        fileReader.readAsDataURL(new Blob([interleavedSamples.buffer]));
+        await new Promise((resolve, reject) => {
+          fileReader.onload = resolve;
+          fileReader.onerror = reject;
+        });
+        return fileReader.result;
+      }
+    } else {
+      if (this.BRIDGE) {
+        this.triggerCanvas.trigger();
+        this.BRIDGE.frameReady(renderId, frameNumber, new ArrayBuffer(0));
+      } else {
+        const fileReader = new FileReader();
+        fileReader.readAsDataURL(new Blob([]));
+        await new Promise((resolve, reject) => {
+          fileReader.onload = resolve;
+          fileReader.onerror = reject;
+        });
+        return fileReader.result;
+      }
+    }
+  }
+}
+window.EF_FRAMEGEN = new EfFramegen();
+exports.EfFramegen = EfFramegen;
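
The interleaving loop in beginFrame calls getChannelData(j).slice(i, i + 1) once per sample per channel, allocating a one-element array each time. A sketch of an equivalent formulation that reads each channel once (not part of the package):

    // Interleave planar AudioBuffer channels into [L0, R0, L1, R1, ...] order,
    // producing the same layout as the loop in beginFrame.
    function interleave(audio: AudioBuffer): Float32Array {
      const channels = audio.numberOfChannels;
      const out = new Float32Array(channels * audio.length);
      for (let ch = 0; ch < channels; ch++) {
        const data = audio.getChannelData(ch); // one read per channel
        for (let i = 0; i < audio.length; i++) {
          out[i * channels + ch] = data[i];
        }
      }
      return out;
    }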

package/dist/packages/elements/src/EF_FRAMEGEN.d.ts
@@ -0,0 +1,45 @@
+import { VideoRenderOptions } from '../../assets/src';
+
+declare global {
+  interface Window {
+    EF_FRAMEGEN?: EfFramegen;
+    FRAMEGEN_BRIDGE?: {
+      onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
+      initialized(renderId: string): void;
+      onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
+      onTriggerCanvas(callback: () => void): void;
+      frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
+      error(renderId: string, error: Error): void;
+    };
+  }
+}
+declare class TriggerCanvas {
+  private canvas;
+  private ctx;
+  constructor();
+  trigger(): void;
+}
+export declare class EfFramegen {
+  time: number;
+  frameDurationMs: number;
+  initialBusyTasks: Promise<unknown[]>;
+  audioBufferPromise?: Promise<AudioBuffer>;
+  renderOptions?: VideoRenderOptions;
+  frameBox: HTMLDivElement;
+  BRIDGE: {
+    onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
+    initialized(renderId: string): void;
+    onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
+    onTriggerCanvas(callback: () => void): void;
+    frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
+    error(renderId: string, error: Error): void;
+  } | undefined;
+  triggerCanvas: TriggerCanvas;
+  log(...args: any[]): void;
+  trace(...args: any[]): void;
+  constructor();
+  connectToBridge(): void;
+  initialize(renderId: string, renderOptions: VideoRenderOptions): Promise<void>;
+  beginFrame(renderId: string, frameNumber: number, isLast: boolean): Promise<string | ArrayBuffer | null | undefined>;
+}
+export {};
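
These declarations describe the two sides of the render protocol: a host injects window.FRAMEGEN_BRIDGE before the page loads, the page registers its callbacks through onInitialize/onBeginFrame, and it reports back through initialized/frameReady/error. A hypothetical host-side stub satisfying this shape (the renderId, renderOptions value, and frame sequencing are illustrative):

    let beginFrame: (renderId: string, frameNumber: number, isLast: boolean) => void = () => {};

    window.FRAMEGEN_BRIDGE = {
      onInitialize: (cb) => cb("render-1", renderOptions), // renderOptions supplied by the host
      onBeginFrame: (cb) => { beginFrame = cb; },
      onTriggerCanvas: () => {},
      initialized: (renderId) => beginFrame(renderId, 0, false), // kick off frame 0
      frameReady: (renderId, frameNumber, audioSamples) =>
        console.log(renderId, frameNumber, audioSamples.byteLength),
      error: (renderId, error) => console.error(renderId, error),
    };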