@editframe/elements 0.5.0-beta.7 → 0.5.0-beta.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{editor/util/EncodedAsset → lib/av}/EncodedAsset.mjs +24 -22
- package/dist/{editor/util → lib/av}/MP4File.mjs +50 -51
- package/dist/{util → lib/util}/memoize.mjs +1 -2
- package/dist/packages/elements/src/EF_FRAMEGEN.mjs +183 -0
- package/dist/{elements → packages/elements}/src/elements/EFAudio.mjs +1 -4
- package/dist/{elements → packages/elements}/src/elements/EFCaptions.mjs +5 -7
- package/dist/{elements → packages/elements}/src/elements/EFImage.mjs +2 -3
- package/dist/{elements → packages/elements}/src/elements/EFMedia.mjs +18 -27
- package/dist/{elements → packages/elements}/src/elements/EFSourceMixin.mjs +5 -7
- package/dist/{elements → packages/elements}/src/elements/EFTemporal.mjs +2 -15
- package/dist/{elements → packages/elements}/src/elements/EFTimegroup.mjs +32 -43
- package/dist/{elements → packages/elements}/src/elements/EFVideo.mjs +8 -30
- package/dist/{elements → packages/elements}/src/elements/EFWaveform.mjs +1 -2
- package/dist/{elements → packages/elements}/src/elements/FetchMixin.mjs +4 -6
- package/dist/{elements → packages/elements}/src/gui/EFFilmstrip.mjs +10 -22
- package/dist/{elements → packages/elements}/src/gui/EFWorkbench.mjs +4 -25
- package/dist/packages/elements/src/gui/TWMixin.css.mjs +4 -0
- package/dist/style.css +13 -4
- package/package.json +7 -2
- package/dist/elements/src/EF_FRAMEGEN.mjs +0 -130
- package/dist/elements/src/gui/TWMixin.css.mjs +0 -4
- package/dist/util/awaitAnimationFrame.mjs +0 -11
- package/docker-compose.yaml +0 -17
- package/src/EF_FRAMEGEN.ts +0 -208
- package/src/EF_INTERACTIVE.ts +0 -2
- package/src/elements/CrossUpdateController.ts +0 -18
- package/src/elements/EFAudio.ts +0 -42
- package/src/elements/EFCaptions.ts +0 -202
- package/src/elements/EFImage.ts +0 -70
- package/src/elements/EFMedia.ts +0 -395
- package/src/elements/EFSourceMixin.ts +0 -57
- package/src/elements/EFTemporal.ts +0 -246
- package/src/elements/EFTimegroup.browsertest.ts +0 -360
- package/src/elements/EFTimegroup.ts +0 -394
- package/src/elements/EFTimeline.ts +0 -13
- package/src/elements/EFVideo.ts +0 -114
- package/src/elements/EFWaveform.ts +0 -407
- package/src/elements/FetchMixin.ts +0 -18
- package/src/elements/TimegroupController.ts +0 -25
- package/src/elements/buildLitFixture.ts +0 -13
- package/src/elements/durationConverter.ts +0 -6
- package/src/elements/parseTimeToMs.ts +0 -10
- package/src/elements/util.ts +0 -24
- package/src/gui/EFFilmstrip.ts +0 -702
- package/src/gui/EFWorkbench.ts +0 -242
- package/src/gui/TWMixin.css +0 -3
- package/src/gui/TWMixin.ts +0 -27
- package/src/util.d.ts +0 -1
- package/dist/{editor → lib/av}/msToTimeCode.mjs +0 -0
- package/dist/{util → lib/util}/awaitMicrotask.mjs +0 -0
- package/dist/{elements → packages/elements}/src/EF_INTERACTIVE.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/CrossUpdateController.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/EFTimeline.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/TimegroupController.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/durationConverter.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/parseTimeToMs.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements/util.mjs +0 -0
- package/dist/{elements → packages/elements}/src/elements.mjs +0 -0
- package/dist/{elements → packages/elements}/src/gui/TWMixin.mjs +0 -0

package/dist/{editor/util/EncodedAsset → lib/av}/EncodedAsset.mjs

@@ -1,15 +1,14 @@
-import { memoize } from "../../../util/memoize.mjs";
 import * as MP4Box from "mp4box";
-import {
+import { memoize } from "../util/memoize.mjs";
+import { MP4File } from "./MP4File.mjs";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
-  var result =
+  var result = __getOwnPropDesc(target, key);
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
-      result =
-      if (
-      __defProp(target, key, result);
+      result = decorator(target, key, result) || result;
+      if (result) __defProp(target, key, result);
   return result;
 };
 const BUFFER_SIZE = 10;

@@ -49,7 +48,7 @@ class ISOFileAsset extends FileAsset {
 }
 __decorateClass([
   memoize
-], ISOFileAsset.prototype, "fileInfo"
+], ISOFileAsset.prototype, "fileInfo");
 const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
   constructor(localName, mp4boxFile, file) {
     super(localName, file, mp4boxFile);

@@ -117,6 +116,7 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     });
     this.decodedFrames = [];
     this.lastDecodedSample = void 0;
+    this.lastSoughtFrame?.close();
     this.lastSoughtFrame = void 0;
   }
   addEventListener(type, callback) {

@@ -268,7 +268,6 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
       codec: this.defaultVideoTrack.codec,
       codedWidth: this.defaultVideoTrack.track_width,
       codedHeight: this.defaultVideoTrack.track_height,
-      // hardwareAcceleration: "prefer-hardware",
       optimizeForLatency: true,
       description
     };

@@ -406,6 +405,9 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
     }
     this.removeEventListener("frame", maybeFrame);
     if (frame) {
+      if (this.lastSoughtFrame && !this.decodedFrames.includes(this.lastSoughtFrame)) {
+        this.lastSoughtFrame.close();
+      }
       this.lastSoughtFrame = frame;
     }
     return frame;
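
The two lastSoughtFrame changes in this file (the close() added in the reset path above, and the conditional close here) plug a WebCodecs leak: a VideoFrame holds decoder or GPU memory until close() is called, and a decoder will stall once too many frames are left open. The pattern, as a minimal TypeScript sketch; the field names mirror the hunks, but the class around them is illustrative, not the package's actual code:

// Illustrative sketch of the seek-frame bookkeeping in the hunks above.
class FrameCache {
  private decodedFrames: VideoFrame[] = [];
  private lastSoughtFrame?: VideoFrame;

  reset(): void {
    // On flush, release every frame still owned before dropping references;
    // an unclosed VideoFrame pins decoder memory until garbage collection.
    this.decodedFrames.forEach((frame) => frame.close());
    this.decodedFrames = [];
    this.lastSoughtFrame?.close();
    this.lastSoughtFrame = undefined;
  }

  keepSought(frame: VideoFrame): void {
    // Close the previously sought frame unless it is still tracked in
    // decodedFrames, in which case reset() will close it later.
    if (this.lastSoughtFrame && !this.decodedFrames.includes(this.lastSoughtFrame)) {
      this.lastSoughtFrame.close();
    }
    this.lastSoughtFrame = frame;
  }
}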

@@ -422,25 +424,25 @@ const _VideoAsset = class _VideoAsset2 extends ISOFileAsset {
 };
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "editsOffset"
+], _VideoAsset.prototype, "editsOffset");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "timescale"
+], _VideoAsset.prototype, "timescale");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "samples"
+], _VideoAsset.prototype, "samples");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "displayOrderedSamples"
+], _VideoAsset.prototype, "displayOrderedSamples");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "defaultVideoTrack"
+], _VideoAsset.prototype, "defaultVideoTrack");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "defaultVideoTrak"
+], _VideoAsset.prototype, "defaultVideoTrak");
 __decorateClass([
   memoize
-], _VideoAsset.prototype, "duration"
+], _VideoAsset.prototype, "duration");
 let VideoAsset = _VideoAsset;
 const _AudioAsset = class _AudioAsset2 extends ISOFileAsset {
   static async createFromReadableStream(id, stream, file) {

@@ -493,19 +495,19 @@ const _AudioAsset = class _AudioAsset2 extends ISOFileAsset {
 };
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "defaultAudioTrack"
+], _AudioAsset.prototype, "defaultAudioTrack");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "defaultAudioTrak"
+], _AudioAsset.prototype, "defaultAudioTrak");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "audioCodec"
+], _AudioAsset.prototype, "audioCodec");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "samplerate"
+], _AudioAsset.prototype, "samplerate");
 __decorateClass([
   memoize
-], _AudioAsset.prototype, "channelCount"
+], _AudioAsset.prototype, "channelCount");
 const _ImageAsset = class _ImageAsset2 extends FileAsset {
   static async createFromReadableStream(id, file) {
     if (file.size === 0) {

@@ -525,10 +527,10 @@ const _ImageAsset = class _ImageAsset2 extends FileAsset {
 };
 __decorateClass([
   memoize
-], _ImageAsset.prototype, "objectUrl"
+], _ImageAsset.prototype, "objectUrl");
 __decorateClass([
   memoize
-], _ImageAsset.prototype, "format"
+], _ImageAsset.prototype, "format");
 export {
   AssetNotAvailableLocally,
   FileAsset,
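
The decorator hunks in this file all track one mechanical change: the build now emits a simpler __decorateClass helper that no longer consults its kind parameter, so every memoized getter is re-registered with the new calling convention. What memoize itself does is not shown in this diff; a minimal sketch of a getter-memoizing legacy decorator in that style, offered as an assumption rather than the package's actual memoize.mjs:

// Hypothetical getter-memoizing legacy decorator in the style these
// dist files compile from; the real memoize.mjs may differ.
function memoize(
  _target: object,
  key: string | symbol,
  descriptor: PropertyDescriptor
): PropertyDescriptor {
  const compute = descriptor.get;
  if (!compute) throw new Error(`memoize expects a getter on ${String(key)}`);
  return {
    ...descriptor,
    get() {
      const value = compute.call(this);
      // Shadow the prototype getter with an own data property so the
      // computation runs at most once per instance.
      Object.defineProperty(this, key, { value, configurable: true });
      return value;
    },
  };
}

class Sample {
  get duration(): number {
    console.log("computed once");
    return 42;
  }
}

// Applied the same way the compiled __decorateClass helper applies it:
const desc = Object.getOwnPropertyDescriptor(Sample.prototype, "duration")!;
Object.defineProperty(
  Sample.prototype,
  "duration",
  memoize(Sample.prototype, "duration", desc)
);

const s = new Sample();
s.duration; // logs "computed once"
s.duration; // served from the cached own property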

package/dist/{editor/util → lib/av}/MP4File.mjs

@@ -82,60 +82,59 @@ class MP4File extends MP4Box.ISOFile {
       if (trak.samples === void 0) {
         throw new Error("trak.samples is undefined");
       }
-      eachSample:
-
-
-
-
-
-
-
-
-
-            fragTrak.segmentStream
-          );
-        } catch (error) {
-          console.log("Failed to createFragment", error);
-        }
-        if (result) {
-          fragTrak.segmentStream = result;
-          trak.nextSample++;
-        } else {
-          finishedReading = await this.waitForMoreSamples();
-          break eachSample;
-        }
-        const nextSample = trak.samples[trak.nextSample];
-        const emitSegment = (
-          // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
-          fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
-          !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
-          // finished ||
-          // if we have more samples than the number of samples requested, we emit the fragment
-          trak.nextSample >= trak.samples.length
+      eachSample: while (trak.nextSample < trak.samples.length) {
+        let result = void 0;
+        if (trak?.samples[trak.nextSample]) {
+          fragmentStartSamples[fragTrak.id] ||= trak.samples[trak.nextSample];
+        }
+        try {
+          result = this.createFragment(
+            fragTrak.id,
+            trak.nextSample,
+            fragTrak.segmentStream
          );
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        } catch (error) {
+          console.log("Failed to createFragment", error);
+        }
+        if (result) {
+          fragTrak.segmentStream = result;
+          trak.nextSample++;
+        } else {
+          finishedReading = await this.waitForMoreSamples();
+          break eachSample;
+        }
+        const nextSample = trak.samples[trak.nextSample];
+        const emitSegment = (
+          // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
+          fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
+          !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
+          // finished ||
+          // if we have more samples than the number of samples requested, we emit the fragment
+          trak.nextSample >= trak.samples.length
+        );
+        if (emitSegment) {
+          if (trak.nextSample >= trak.samples.length) {
+            trackInfo[fragTrak.id].complete = true;
           }
+          const startSample = fragmentStartSamples[fragTrak.id];
+          const endSample = trak.samples[trak.nextSample - 1];
+          if (!startSample || !endSample) {
+            throw new Error("startSample or endSample is undefined");
+          }
+          yield {
+            track: fragTrak.id,
+            segment: trackInfo[fragTrak.id].index,
+            data: fragTrak.segmentStream.buffer,
+            complete: trackInfo[fragTrak.id].complete,
+            cts: startSample.cts,
+            dts: startSample.dts,
+            duration: endSample.dts - startSample.dts + endSample.duration
+          };
+          trackInfo[fragTrak.id].index += 1;
+          fragTrak.segmentStream = null;
+          delete fragmentStartSamples[fragTrak.id];
         }
+      }
     }
     finishedReading = await this.waitForMoreSamples();
   }
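
This rewrite is more than the reformatting it resembles: the loop now records the first sample of each in-flight fragment in fragmentStartSamples, so every emitted segment carries its own cts/dts and an exact duration (endSample.dts - startSample.dts + endSample.duration) instead of leaving timing to the consumer, and per-track completion is tracked in trackInfo. A consumer of this async generator might look like the following sketch; the yielded shape is copied from the hunk, while the surrounding types and the collectTrack helper are assumptions, since the generator's name is not visible in this diff:

// Shape yielded per fragment, as written in the hunk above.
interface FragmentSegment {
  track: number;       // fragTrak.id
  segment: number;     // running index per track (trackInfo[id].index)
  data: ArrayBuffer;   // fragTrak.segmentStream.buffer
  complete: boolean;   // true once the track's last sample is consumed
  cts: number;         // composition timestamp of the fragment's first sample
  dts: number;         // decode timestamp of the fragment's first sample
  duration: number;    // endSample.dts - startSample.dts + endSample.duration
}

// Hypothetical consumer that gathers one track's fragments in order.
async function collectTrack(
  segments: AsyncIterable<FragmentSegment>,
  trackId: number
): Promise<ArrayBuffer[]> {
  const buffers: ArrayBuffer[] = [];
  for await (const segment of segments) {
    if (segment.track !== trackId) continue;
    buffers.push(segment.data);
    if (segment.complete) break; // last fragment for this track
  }
  return buffers;
}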

package/dist/packages/elements/src/EF_FRAMEGEN.mjs

@@ -0,0 +1,183 @@
+import { deepGetElementsWithFrameTasks } from "./elements/EFTemporal.mjs";
+import { awaitMicrotask } from "../../../lib/util/awaitMicrotask.mjs";
+import { TaskStatus } from "@lit/task";
+import { shallowGetTimegroups } from "./elements/EFTimegroup.mjs";
+class TriggerCanvas {
+  constructor() {
+    this.canvas = document.createElement("canvas");
+    this.canvas.width = 1;
+    this.canvas.height = 1;
+    Object.assign(this.canvas.style, {
+      position: "fixed",
+      top: "0px",
+      left: "0px",
+      width: `1px`,
+      height: `1px`,
+      zIndex: "100000"
+    });
+    document.body.prepend(this.canvas);
+    this.ctx = this.canvas.getContext("2d", { willReadFrequently: true });
+    this.ctx.fillStyle = "black";
+  }
+  trigger() {
+    console.log("TRIGGERING CANVAS");
+    this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
+  }
+}
+class EfFramegen {
+  constructor() {
+    this.time = 0;
+    this.frameDurationMs = 0;
+    this.initialBusyTasks = Promise.resolve([]);
+    this.frameBox = document.createElement("div");
+    this.BRIDGE = window.FRAMEGEN_BRIDGE;
+    this.triggerCanvas = new TriggerCanvas();
+    if (this.BRIDGE) {
+      this.connectToBridge();
+    }
+  }
+  log(...args) {
+    console.log("[EF_FRAMEGEN]", ...args);
+  }
+  trace(...args) {
+    console.trace("[EF_FRAMEGEN]", ...args);
+  }
+  connectToBridge() {
+    const BRIDGE = this.BRIDGE;
+    if (!BRIDGE) {
+      throw new Error("No BRIDGE when attempting to connect to bridge");
+    }
+    BRIDGE.onInitialize(async (renderId, renderOptions) => {
+      this.log("BRIDGE.onInitialize", renderId, renderOptions);
+      await this.initialize(renderId, renderOptions);
+      BRIDGE.initialized(renderId);
+    });
+    BRIDGE.onBeginFrame((renderId, frameNumber, isLast) => {
+      this.log("BRIDGE.onBeginFrame", renderId, frameNumber, isLast);
+      this.beginFrame(renderId, frameNumber, isLast);
+    });
+  }
+  async initialize(renderId, renderOptions) {
+    addEventListener("unhandledrejection", (event) => {
+      this.trace("Unhandled rejection:", event.reason);
+      if (this.BRIDGE) {
+        this.BRIDGE.error(renderId, event.reason);
+      }
+    });
+    addEventListener("error", (event) => {
+      this.trace("Uncaught error", event.error);
+      if (this.BRIDGE) {
+        this.BRIDGE.error(renderId, event.error);
+      }
+    });
+    this.renderOptions = renderOptions;
+    const workbench = document.querySelector("ef-workbench");
+    workbench.rendering = true;
+    const timegroups = shallowGetTimegroups(workbench);
+    const temporals = deepGetElementsWithFrameTasks(workbench);
+    const firstGroup = timegroups[0];
+    if (!firstGroup) {
+      throw new Error("No temporal elements found");
+    }
+    firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs;
+    this.frameDurationMs = 1e3 / renderOptions.encoderOptions.video.framerate;
+    this.initialBusyTasks = Promise.all(
+      temporals.filter((temporal) => temporal.frameTask.status < TaskStatus.COMPLETE).map((temporal) => temporal.frameTask).map((task) => task.taskComplete)
+    );
+    this.time = 0;
+    if (renderOptions.showFrameBox) {
+      Object.assign(this.frameBox.style, {
+        width: "200px",
+        height: "100px",
+        font: "30px Arial",
+        backgroundColor: "white",
+        position: "absolute",
+        top: "0px",
+        left: "0px",
+        zIndex: "100000"
+      });
+      document.body.prepend(this.frameBox);
+    }
+    this.audioBufferPromise = firstGroup.renderAudio(
+      renderOptions.encoderOptions.alignedFromUs / 1e3,
+      renderOptions.encoderOptions.alignedToUs / 1e3
+      // renderOptions.encoderOptions.fromMs,
+      // renderOptions.encoderOptions.toMs,
+    );
+    this.log("Initialized");
+  }
+  async beginFrame(renderId, frameNumber, isLast) {
+    if (this.renderOptions?.showFrameBox) {
+      this.frameBox.innerHTML = `
+        <div>Frame #${frameNumber}</div>
+        <div>${this.time.toFixed(4)}</div>
+      `;
+    }
+    const workbench = document.querySelector("ef-workbench");
+    workbench.rendering = true;
+    const timegroups = shallowGetTimegroups(workbench);
+    const temporals = deepGetElementsWithFrameTasks(workbench);
+    const firstGroup = timegroups[0];
+    this.time = this.renderOptions.encoderOptions.fromMs + frameNumber * this.frameDurationMs;
+    firstGroup.currentTimeMs = this.time;
+    await this.initialBusyTasks;
+    await awaitMicrotask();
+    const now = performance.now();
+    await Promise.all(
+      temporals.filter((temporal) => temporal.frameTask.status < TaskStatus.COMPLETE).map((temporal) => {
+        return temporal.frameTask;
+      }).map((task) => task.taskComplete)
+    );
+    console.log(
+      `frame:${frameNumber} All tasks complete ${performance.now() - now}ms`
+    );
+    if (isLast && this.audioBufferPromise) {
+      const renderedAudio = await this.audioBufferPromise;
+      const channelCount = renderedAudio.numberOfChannels;
+      const interleavedSamples = new Float32Array(
+        channelCount * renderedAudio.length
+      );
+      for (let i = 0; i < renderedAudio.length; i++) {
+        for (let j = 0; j < channelCount; j++) {
+          interleavedSamples.set(
+            renderedAudio.getChannelData(j).slice(i, i + 1),
+            i * channelCount + j
+          );
+        }
+      }
+      if (this.BRIDGE) {
+        this.triggerCanvas.trigger();
+        this.BRIDGE.frameReady(
+          renderId,
+          frameNumber,
+          interleavedSamples.buffer
+        );
+      } else {
+        const fileReader = new FileReader();
+        fileReader.readAsDataURL(new Blob([interleavedSamples.buffer]));
+        await new Promise((resolve, reject) => {
+          fileReader.onload = resolve;
+          fileReader.onerror = reject;
+        });
+        return fileReader.result;
+      }
+    } else {
+      if (this.BRIDGE) {
+        this.triggerCanvas.trigger();
+        this.BRIDGE.frameReady(renderId, frameNumber, new ArrayBuffer(0));
+      } else {
+        const fileReader = new FileReader();
+        fileReader.readAsDataURL(new Blob([]));
+        await new Promise((resolve, reject) => {
+          fileReader.onload = resolve;
+          fileReader.onerror = reject;
+        });
+        return fileReader.result;
+      }
+    }
+  }
+}
+window.EF_FRAMEGEN = new EfFramegen();
+export {
+  EfFramegen
+};
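
EF_FRAMEGEN.mjs is effectively rewritten: the old 130-line dist/elements/src/EF_FRAMEGEN.mjs is deleted (see the file list) and this new module drives headless rendering by stepping the first timegroup's currentTimeMs one frame at a time, waiting for every outstanding frameTask, then signalling readiness over window.FRAMEGEN_BRIDGE. The bridge interface is only visible through its call sites; a stub for local experimentation could look like this sketch, where every member name is inferred from those call sites and nothing more:

// Bridge surface inferred from connectToBridge()/initialize()/beginFrame();
// a test stub, not Editframe's actual host implementation.
type RenderOptions = {
  showFrameBox?: boolean;
  encoderOptions: {
    fromMs: number;
    alignedFromUs: number;
    alignedToUs: number;
    video: { framerate: number };
  };
};

interface FramegenBridge {
  onInitialize(cb: (renderId: string, opts: RenderOptions) => Promise<void> | void): void;
  onBeginFrame(cb: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
  initialized(renderId: string): void;
  frameReady(renderId: string, frameNumber: number, audio: ArrayBuffer): void;
  error(renderId: string, reason: unknown): void;
}

const TOTAL_FRAMES = 3; // arbitrary, for the stub
let frame = 0;
let begin: ((id: string, f: number, last: boolean) => void) | undefined;

const stubBridge: FramegenBridge = {
  onInitialize(cb) {
    // Kick off a render as soon as EfFramegen registers its handler.
    void cb("render-1", {
      encoderOptions: {
        fromMs: 0,
        alignedFromUs: 0,
        alignedToUs: 1_000_000,
        video: { framerate: 30 },
      },
    });
  },
  onBeginFrame(cb) {
    begin = cb;
  },
  initialized(id) {
    // EfFramegen calls this once initialize() resolves; request frame 0.
    begin?.(id, frame, frame === TOTAL_FRAMES - 1);
  },
  frameReady(id, f, audio) {
    console.log(`frame ${f} ready (${audio.byteLength} audio bytes)`);
    frame += 1;
    if (frame < TOTAL_FRAMES) begin?.(id, frame, frame === TOTAL_FRAMES - 1);
  },
  error(id, reason) {
    console.error("render error", id, reason);
  },
};

// Must exist before EF_FRAMEGEN.mjs evaluates, since the constructor reads it.
(window as any).FRAMEGEN_BRIDGE = stubBridge;

One detail worth flagging in beginFrame: the audio interleave loop copies one sample at a time via getChannelData(j).slice(i, i + 1), allocating a one-element Float32Array per sample per channel, so hosts rendering long timelines may want to watch that hot path.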

package/dist/{elements → packages/elements}/src/elements/EFAudio.mjs

@@ -10,8 +10,7 @@ var __decorateClass = (decorators, target, key, kind) => {
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-  if (kind && result)
-    __defProp(target, key, result);
+  if (kind && result) __defProp(target, key, result);
   return result;
 };
 let EFAudio = class extends EFMedia {

@@ -28,13 +27,11 @@ let EFAudio = class extends EFMedia {
       this.videoAssetTask.status
     ],
     task: async () => {
-      console.log("EFAudio frameTask", this.ownCurrentTimeMs);
       await this.trackFragmentIndexLoader.taskComplete;
       await this.initSegmentsLoader.taskComplete;
       await this.seekTask.taskComplete;
       await this.fetchSeekTask.taskComplete;
       await this.videoAssetTask.taskComplete;
-      console.log("EFAudio frameTask complete", this.ownCurrentTimeMs);
       this.rootTimegroup?.requestUpdate();
     }
   });
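
The lines removed here are only console.log tracing; what remains is the pattern worth seeing: EFAudio's frameTask re-runs whenever an upstream task's status changes, and serializes on those loaders by awaiting each one's taskComplete promise. A reduced sketch using the real @lit/task API, with an illustrative host and helper name:

import { Task } from "@lit/task";
import type { ReactiveControllerHost } from "lit";

// A downstream task that settles only after its upstream loaders have,
// mirroring the frameTask structure above.
function makeFrameTask(
  host: ReactiveControllerHost,
  upstream: Array<Task<readonly unknown[], unknown>>
) {
  return new Task(host, {
    // Re-run when any upstream task changes status.
    args: () => upstream.map((task) => task.status),
    task: async () => {
      // taskComplete resolves (or rejects) when the current run settles.
      for (const task of upstream) await task.taskComplete;
    },
  });
}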

package/dist/{elements → packages/elements}/src/elements/EFCaptions.mjs

@@ -15,8 +15,7 @@ var __decorateClass = (decorators, target, key, kind) => {
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-  if (kind && result)
-    __defProp(target, key, result);
+  if (kind && result) __defProp(target, key, result);
   return result;
 };
 let EFCaptionsActiveWord = class extends EFTemporal(LitElement) {

@@ -68,7 +67,7 @@ let EFCaptions = class extends EFSourceMixin(
     autoRun: false,
     args: () => [this.target],
     task: async ([], { signal }) => {
-      const md5Path = `/@ef-asset/${this.targetElement.
+      const md5Path = `/@ef-asset/${this.targetElement.src ?? ""}`;
       const response = await fetch(md5Path, { method: "HEAD", signal });
       return response.headers.get("etag") ?? void 0;
     }

@@ -90,11 +89,10 @@ let EFCaptions = class extends EFSourceMixin(
     });
   }
   captionsPath() {
-
-
-      return src.replace("isobmff", "caption");
+    if (this.src.startsWith("editframe://") || this.src.startsWith("http")) {
+      return this.src.replace("isobmff", "caption");
     }
-    return `/@ef-captions/${this.targetElement.
+    return `/@ef-captions/${this.targetElement.src ?? ""}`;
   }
   productionSrc() {
     if (!this.md5SumLoader.value) {

package/dist/{elements → packages/elements}/src/elements/EFImage.mjs

@@ -12,8 +12,7 @@ var __decorateClass = (decorators, target, key, kind) => {
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-  if (kind && result)
-    __defProp(target, key, result);
+  if (kind && result) __defProp(target, key, result);
   return result;
 };
 let EFImage = class extends EFSourceMixin(FetchMixin(LitElement), {

@@ -51,7 +50,7 @@ let EFImage = class extends EFSourceMixin(FetchMixin(LitElement), {
     return html`<canvas ${ref(this.canvasRef)}></canvas>`;
   }
   assetPath() {
-    if (this.src.startsWith("http")) {
+    if (this.src.startsWith("editframe://") || this.src.startsWith("http")) {
       return this.src;
     }
     return `/@ef-image/${this.src}`;
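
EFCaptions above and EFMedia below gain the same guard as EFImage here: a source is treated as already-absolute when it uses the editframe:// scheme or an http(s) URL, and anything else is routed through a local /@ef-* dev-server path. The check is a one-line helper if it keeps spreading; a trivial sketch, where the helper name is an assumption and the package currently inlines the check per element:

// Hypothetical shared helper for the repeated scheme check.
function isAbsoluteEfSource(src: string): boolean {
  // startsWith("http") deliberately matches both http: and https:, as in the hunks.
  return src.startsWith("editframe://") || src.startsWith("http");
}

// Usage mirroring EFImage.assetPath():
function assetPath(src: string): string {
  return isAbsoluteEfSource(src) ? src : `/@ef-image/${src}`;
}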

package/dist/{elements → packages/elements}/src/elements/EFMedia.mjs

@@ -3,27 +3,25 @@ import { EFTemporal } from "./EFTemporal.mjs";
 import { property, state } from "lit/decorators.js";
 import { deepArrayEquals } from "@lit/task/deep-equals.js";
 import { Task } from "@lit/task";
-import { MP4File } from "
+import { MP4File } from "../../../../lib/av/MP4File.mjs";
 import { getStartTimeMs } from "./util.mjs";
-import { VideoAsset } from "
+import { VideoAsset } from "../../../../lib/av/EncodedAsset.mjs";
 import { FetchMixin } from "./FetchMixin.mjs";
 import { apiHostContext } from "../gui/EFWorkbench.mjs";
 import { consume } from "@lit/context";
 import { EFSourceMixin } from "./EFSourceMixin.mjs";
 import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
 var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
-  var result =
+  var result = void 0;
   for (var i = decorators.length - 1, decorator; i >= 0; i--)
     if (decorator = decorators[i])
-      result =
-      if (
-      __defProp(target, key, result);
+      result = decorator(target, key, result) || result;
+      if (result) __defProp(target, key, result);
   return result;
 };
 const deepGetMediaElements = (element, medias = []) => {
-  for (const child of element.children) {
+  for (const child of Array.from(element.children)) {
     if (child instanceof EFMedia) {
       medias.push(child);
     } else {

@@ -41,7 +39,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
     this.trackFragmentIndexLoader = new Task(this, {
       args: () => [this.fragmentIndexPath(), this.fetch],
       task: async ([fragmentIndexPath, fetch], { signal }) => {
-        console.log("EFMedia trackFragmentIndexLoader");
         const response = await fetch(fragmentIndexPath, { signal });
         return await response.json();
       },

@@ -54,7 +51,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       autoRun: EF_INTERACTIVE,
       args: () => [this.trackFragmentIndexLoader.value, this.src, this.fetch],
       task: async ([fragmentIndex, _src, fetch], { signal }) => {
-        console.log("EFMedia initSegmentsLoader");
        if (!fragmentIndex) {
          return;
        }

@@ -85,7 +81,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
         this.initSegmentsLoader.value
       ],
       task: async ([seekToMs, fragmentIndex, initSegments], { signal }) => {
-        console.log("EFMedia seekTask");
         if (fragmentIndex === void 0) {
           return;
         }

@@ -114,7 +109,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       argsEqual: deepArrayEquals,
       args: () => [this.initSegmentsLoader.value, this.seekTask.value, this.fetch],
       task: async ([initSegments, seekResult, fetch], { signal }) => {
-        console.log("EFMedia fetchSeekTask");
         if (!initSegments) {
           return;
         }

@@ -145,7 +139,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       autoRun: EF_INTERACTIVE,
       args: () => [this.fetchSeekTask.value],
       task: async ([files], { signal }) => {
-        console.log("EFMedia videoAssetTask");
         if (!files) {
           return;
         }

@@ -156,6 +149,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
         if (!videoFile) {
           return;
         }
+        this.videoAssetTask.value?.decodedFrames.forEach(
+          (frame) => frame.close()
+        );
+        this.videoAssetTask.value?.videoDecoder?.close();
         return await VideoAsset.createFromReadableStream(
           "video.mp4",
           videoFile.stream(),

@@ -169,7 +166,6 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       autoRun: EF_INTERACTIVE,
       args: () => [this.fetchSeekTask.value, this.seekTask.value],
       task: async ([files, segments], { signal }) => {
-        console.log("EFMedia audioBufferTask", this.outerHTML);
         if (!files) {
           return;
         }

@@ -207,20 +203,17 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
       `
     ];
   }
-  // get requiredAssets() {
-  //   return { [this.md5SumLoader.value]: this.requiredAssetsPath.value ?? [] };
-  // }
   fragmentIndexPath() {
-    if (this.
-      return this.
+    if (this.src.startsWith("editframe://") || this.src.startsWith("http")) {
+      return this.src + "/index";
     }
     return `/@ef-track-fragment-index/${this.getAttribute("src") ?? ""}`;
   }
   fragmentTrackPath(trackId) {
-    if (this.
-      return this.
+    if (this.src.startsWith("editframe://") || this.src.startsWith("http")) {
+      return this.src.replace("files", "tracks") + `/${trackId}`;
     }
-    return `/@ef-track/${this.
+    return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
   }
   get defaultVideoTrackId() {
     return Object.values(this.trackFragmentIndexLoader.value ?? {}).find(

@@ -290,12 +283,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
         return segmentStartsBeforeEnd && segmentEndsAfterStart;
       }
     );
-    console.log("FRAGMENTS SPANNING TIME", JSON.stringify(fragments));
     const firstFragment = fragments[0];
     const lastFragment = fragments[fragments.length - 1];
     const fragmentStart = firstFragment.offset;
     const fragmentEnd = lastFragment.offset + lastFragment.size - 1;
-    console.log("FETCHING BYTES", `bytes=${fragmentStart}-${fragmentEnd}`);
     const audioFragmentRequest = this.fetch(
       this.fragmentTrackPath(String(audioTrackId)),
       {

@@ -318,14 +309,14 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
 }
 __decorateClass([
   property({ type: Number })
-], EFMedia.prototype, "currentTimeMs"
+], EFMedia.prototype, "currentTimeMs");
 __decorateClass([
   consume({ context: apiHostContext, subscribe: true }),
   state()
-], EFMedia.prototype, "efHost"
+], EFMedia.prototype, "efHost");
 __decorateClass([
   state()
-], EFMedia.prototype, "desiredSeekTimeMs"
+], EFMedia.prototype, "desiredSeekTimeMs");
 export {
   EFMedia,
   deepGetMediaElements