@editframe/elements 0.6.0-beta.11 → 0.6.0-beta.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/package.json +2 -2
  2. package/dist/lib/av/EncodedAsset.cjs +0 -561
  3. package/dist/lib/av/EncodedAsset.js +0 -544
  4. package/dist/lib/av/MP4File.cjs +0 -182
  5. package/dist/lib/av/MP4File.js +0 -165
  6. package/dist/lib/av/msToTimeCode.cjs +0 -15
  7. package/dist/lib/av/msToTimeCode.js +0 -15
  8. package/dist/lib/util/awaitMicrotask.cjs +0 -8
  9. package/dist/lib/util/awaitMicrotask.js +0 -8
  10. package/dist/lib/util/memoize.cjs +0 -14
  11. package/dist/lib/util/memoize.js +0 -14
  12. package/dist/packages/elements/src/EF_FRAMEGEN.cjs +0 -197
  13. package/dist/packages/elements/src/EF_FRAMEGEN.d.ts +0 -45
  14. package/dist/packages/elements/src/EF_FRAMEGEN.js +0 -197
  15. package/dist/packages/elements/src/EF_INTERACTIVE.cjs +0 -4
  16. package/dist/packages/elements/src/EF_INTERACTIVE.d.ts +0 -1
  17. package/dist/packages/elements/src/EF_INTERACTIVE.js +0 -4
  18. package/dist/packages/elements/src/elements/CrossUpdateController.cjs +0 -16
  19. package/dist/packages/elements/src/elements/CrossUpdateController.d.ts +0 -9
  20. package/dist/packages/elements/src/elements/CrossUpdateController.js +0 -16
  21. package/dist/packages/elements/src/elements/EFAudio.cjs +0 -53
  22. package/dist/packages/elements/src/elements/EFAudio.d.ts +0 -10
  23. package/dist/packages/elements/src/elements/EFAudio.js +0 -54
  24. package/dist/packages/elements/src/elements/EFCaptions.cjs +0 -171
  25. package/dist/packages/elements/src/elements/EFCaptions.d.ts +0 -39
  26. package/dist/packages/elements/src/elements/EFCaptions.js +0 -173
  27. package/dist/packages/elements/src/elements/EFImage.cjs +0 -79
  28. package/dist/packages/elements/src/elements/EFImage.d.ts +0 -14
  29. package/dist/packages/elements/src/elements/EFImage.js +0 -80
  30. package/dist/packages/elements/src/elements/EFMedia.cjs +0 -334
  31. package/dist/packages/elements/src/elements/EFMedia.d.ts +0 -61
  32. package/dist/packages/elements/src/elements/EFMedia.js +0 -334
  33. package/dist/packages/elements/src/elements/EFSourceMixin.cjs +0 -55
  34. package/dist/packages/elements/src/elements/EFSourceMixin.d.ts +0 -12
  35. package/dist/packages/elements/src/elements/EFSourceMixin.js +0 -55
  36. package/dist/packages/elements/src/elements/EFTemporal.cjs +0 -198
  37. package/dist/packages/elements/src/elements/EFTemporal.d.ts +0 -36
  38. package/dist/packages/elements/src/elements/EFTemporal.js +0 -198
  39. package/dist/packages/elements/src/elements/EFTimegroup.browsertest.d.ts +0 -12
  40. package/dist/packages/elements/src/elements/EFTimegroup.cjs +0 -343
  41. package/dist/packages/elements/src/elements/EFTimegroup.d.ts +0 -39
  42. package/dist/packages/elements/src/elements/EFTimegroup.js +0 -344
  43. package/dist/packages/elements/src/elements/EFTimeline.cjs +0 -15
  44. package/dist/packages/elements/src/elements/EFTimeline.d.ts +0 -3
  45. package/dist/packages/elements/src/elements/EFTimeline.js +0 -15
  46. package/dist/packages/elements/src/elements/EFVideo.cjs +0 -110
  47. package/dist/packages/elements/src/elements/EFVideo.d.ts +0 -14
  48. package/dist/packages/elements/src/elements/EFVideo.js +0 -111
  49. package/dist/packages/elements/src/elements/EFWaveform.cjs +0 -235
  50. package/dist/packages/elements/src/elements/EFWaveform.d.ts +0 -28
  51. package/dist/packages/elements/src/elements/EFWaveform.js +0 -219
  52. package/dist/packages/elements/src/elements/FetchMixin.cjs +0 -28
  53. package/dist/packages/elements/src/elements/FetchMixin.d.ts +0 -8
  54. package/dist/packages/elements/src/elements/FetchMixin.js +0 -28
  55. package/dist/packages/elements/src/elements/TimegroupController.cjs +0 -20
  56. package/dist/packages/elements/src/elements/TimegroupController.d.ts +0 -14
  57. package/dist/packages/elements/src/elements/TimegroupController.js +0 -20
  58. package/dist/packages/elements/src/elements/durationConverter.cjs +0 -8
  59. package/dist/packages/elements/src/elements/durationConverter.d.ts +0 -4
  60. package/dist/packages/elements/src/elements/durationConverter.js +0 -8
  61. package/dist/packages/elements/src/elements/parseTimeToMs.cjs +0 -12
  62. package/dist/packages/elements/src/elements/parseTimeToMs.d.ts +0 -1
  63. package/dist/packages/elements/src/elements/parseTimeToMs.js +0 -12
  64. package/dist/packages/elements/src/elements/util.cjs +0 -11
  65. package/dist/packages/elements/src/elements/util.d.ts +0 -4
  66. package/dist/packages/elements/src/elements/util.js +0 -11
  67. package/dist/packages/elements/src/gui/EFFilmstrip.cjs +0 -675
  68. package/dist/packages/elements/src/gui/EFFilmstrip.d.ts +0 -138
  69. package/dist/packages/elements/src/gui/EFFilmstrip.js +0 -683
  70. package/dist/packages/elements/src/gui/EFWorkbench.cjs +0 -199
  71. package/dist/packages/elements/src/gui/EFWorkbench.d.ts +0 -44
  72. package/dist/packages/elements/src/gui/EFWorkbench.js +0 -200
  73. package/dist/packages/elements/src/gui/TWMixin.cjs +0 -28
  74. package/dist/packages/elements/src/gui/TWMixin.css.cjs +0 -3
  75. package/dist/packages/elements/src/gui/TWMixin.css.js +0 -4
  76. package/dist/packages/elements/src/gui/TWMixin.d.ts +0 -3
  77. package/dist/packages/elements/src/gui/TWMixin.js +0 -28
  78. package/dist/packages/elements/src/index.cjs +0 -47
  79. package/dist/packages/elements/src/index.d.ts +0 -10
  80. package/dist/packages/elements/src/index.js +0 -23
  81. package/dist/style.css +0 -766
package/dist/lib/av/MP4File.cjs
@@ -1,182 +0,0 @@
- "use strict";
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const MP4Box = require("mp4box");
- function _interopNamespaceDefault(e) {
-   const n = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
-   if (e) {
-     for (const k in e) {
-       if (k !== "default") {
-         const d = Object.getOwnPropertyDescriptor(e, k);
-         Object.defineProperty(n, k, d.get ? d : {
-           enumerable: true,
-           get: () => e[k]
-         });
-       }
-     }
-   }
-   n.default = e;
-   return Object.freeze(n);
- }
- const MP4Box__namespace = /* @__PURE__ */ _interopNamespaceDefault(MP4Box);
- class MP4File extends MP4Box__namespace.ISOFile {
-   constructor() {
-     super(...arguments);
-     this.readyPromise = new Promise((resolve, reject) => {
-       this.onReady = () => resolve();
-       this.onError = reject;
-     });
-     this.waitingForSamples = [];
-     this._hasSeenLastSamples = false;
-     this._arrayBufferFileStart = 0;
-   }
-   setSegmentOptions(id, user, options) {
-     const trak = this.getTrackById(id);
-     if (trak) {
-       trak.nextSample = 0;
-       this.fragmentedTracks.push({
-         id,
-         user,
-         trak,
-         segmentStream: null,
-         nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
-         rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
-       });
-     }
-   }
-   /**
-    * Fragments all tracks in a file into separate array buffers.
-    */
-   async fragmentAllTracks() {
-     const trackBuffers = {};
-     for await (const segment of this.fragmentIterator()) {
-       (trackBuffers[segment.track] ??= []).push(segment.data);
-     }
-     return trackBuffers;
-   }
-   async *fragmentIterator() {
-     await this.readyPromise;
-     const trackInfo = {};
-     for (const videoTrack of this.getInfo().videoTracks) {
-       trackInfo[videoTrack.id] = { index: 0, complete: false };
-       this.setSegmentOptions(videoTrack.id, null, {
-         rapAlignement: true
-       });
-     }
-     for (const audioTrack of this.getInfo().audioTracks) {
-       trackInfo[audioTrack.id] = { index: 0, complete: false };
-       const sampleRate = audioTrack.audio.sample_rate;
-       const probablePacketSize = 1024;
-       const probableFourSecondsOfSamples = Math.ceil(
-         sampleRate / probablePacketSize * 4
-       );
-       this.setSegmentOptions(audioTrack.id, null, {
-         nbSamples: probableFourSecondsOfSamples
-       });
-     }
-     const initSegments = this.initializeSegmentation();
-     for (const initSegment of initSegments) {
-       yield {
-         track: initSegment.id,
-         segment: "init",
-         data: initSegment.buffer,
-         complete: false
-       };
-     }
-     const fragmentStartSamples = {};
-     let finishedReading = false;
-     const allTracksFinished = () => {
-       for (const fragmentedTrack of this.fragmentedTracks) {
-         if (!trackInfo[fragmentedTrack.id]?.complete) {
-           return false;
-         }
-       }
-       return true;
-     };
-     while (!(finishedReading && allTracksFinished())) {
-       for (const fragTrak of this.fragmentedTracks) {
-         const trak = fragTrak.trak;
-         if (trak.nextSample === void 0) {
-           throw new Error("trak.nextSample is undefined");
-         }
-         if (trak.samples === void 0) {
-           throw new Error("trak.samples is undefined");
-         }
-         while (trak.nextSample < trak.samples.length) {
-           let result = void 0;
-           const fragTrakNextSample = trak.samples[trak.nextSample];
-           if (fragTrakNextSample) {
-             fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
-           }
-           try {
-             result = this.createFragment(
-               fragTrak.id,
-               trak.nextSample,
-               fragTrak.segmentStream
-             );
-           } catch (error) {
-             console.log("Failed to createFragment", error);
-           }
-           if (result) {
-             fragTrak.segmentStream = result;
-             trak.nextSample++;
-           } else {
-             finishedReading = await this.waitForMoreSamples();
-             break;
-           }
-           const nextSample = trak.samples[trak.nextSample];
-           const emitSegment = (
-             // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
-             fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
-             !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
-             // finished ||
-             // if we have more samples than the number of samples requested, we emit the fragment
-             trak.nextSample >= trak.samples.length
-           );
-           if (emitSegment) {
-             const trackInfoForFrag = trackInfo[fragTrak.id];
-             if (!trackInfoForFrag) {
-               throw new Error("trackInfoForFrag is undefined");
-             }
-             if (trak.nextSample >= trak.samples.length) {
-               trackInfoForFrag.complete = true;
-             }
-             const startSample = fragmentStartSamples[fragTrak.id];
-             const endSample = trak.samples[trak.nextSample - 1];
-             if (!startSample || !endSample) {
-               throw new Error("startSample or endSample is undefined");
-             }
-             yield {
-               track: fragTrak.id,
-               segment: trackInfoForFrag.index,
-               data: fragTrak.segmentStream.buffer,
-               complete: trackInfoForFrag.complete,
-               cts: startSample.cts,
-               dts: startSample.dts,
-               duration: endSample.cts - startSample.cts + endSample.duration
-             };
-             trackInfoForFrag.index += 1;
-             fragTrak.segmentStream = null;
-             delete fragmentStartSamples[fragTrak.id];
-           }
-         }
-       }
-       finishedReading = await this.waitForMoreSamples();
-     }
-   }
-   waitForMoreSamples() {
-     if (this._hasSeenLastSamples) {
-       return Promise.resolve(true);
-     }
-     return new Promise((resolve) => {
-       this.waitingForSamples.push(resolve);
-     });
-   }
-   processSamples(last) {
-     this._hasSeenLastSamples = last;
-     for (const observer of this.waitingForSamples) {
-       observer(last);
-     }
-     this.waitingForSamples = [];
-   }
- }
- exports.MP4File = MP4File;
package/dist/lib/av/MP4File.js
@@ -1,165 +0,0 @@
- import * as MP4Box from "mp4box";
- class MP4File extends MP4Box.ISOFile {
-   constructor() {
-     super(...arguments);
-     this.readyPromise = new Promise((resolve, reject) => {
-       this.onReady = () => resolve();
-       this.onError = reject;
-     });
-     this.waitingForSamples = [];
-     this._hasSeenLastSamples = false;
-     this._arrayBufferFileStart = 0;
-   }
-   setSegmentOptions(id, user, options) {
-     const trak = this.getTrackById(id);
-     if (trak) {
-       trak.nextSample = 0;
-       this.fragmentedTracks.push({
-         id,
-         user,
-         trak,
-         segmentStream: null,
-         nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
-         rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
-       });
-     }
-   }
-   /**
-    * Fragments all tracks in a file into separate array buffers.
-    */
-   async fragmentAllTracks() {
-     const trackBuffers = {};
-     for await (const segment of this.fragmentIterator()) {
-       (trackBuffers[segment.track] ??= []).push(segment.data);
-     }
-     return trackBuffers;
-   }
-   async *fragmentIterator() {
-     await this.readyPromise;
-     const trackInfo = {};
-     for (const videoTrack of this.getInfo().videoTracks) {
-       trackInfo[videoTrack.id] = { index: 0, complete: false };
-       this.setSegmentOptions(videoTrack.id, null, {
-         rapAlignement: true
-       });
-     }
-     for (const audioTrack of this.getInfo().audioTracks) {
-       trackInfo[audioTrack.id] = { index: 0, complete: false };
-       const sampleRate = audioTrack.audio.sample_rate;
-       const probablePacketSize = 1024;
-       const probableFourSecondsOfSamples = Math.ceil(
-         sampleRate / probablePacketSize * 4
-       );
-       this.setSegmentOptions(audioTrack.id, null, {
-         nbSamples: probableFourSecondsOfSamples
-       });
-     }
-     const initSegments = this.initializeSegmentation();
-     for (const initSegment of initSegments) {
-       yield {
-         track: initSegment.id,
-         segment: "init",
-         data: initSegment.buffer,
-         complete: false
-       };
-     }
-     const fragmentStartSamples = {};
-     let finishedReading = false;
-     const allTracksFinished = () => {
-       for (const fragmentedTrack of this.fragmentedTracks) {
-         if (!trackInfo[fragmentedTrack.id]?.complete) {
-           return false;
-         }
-       }
-       return true;
-     };
-     while (!(finishedReading && allTracksFinished())) {
-       for (const fragTrak of this.fragmentedTracks) {
-         const trak = fragTrak.trak;
-         if (trak.nextSample === void 0) {
-           throw new Error("trak.nextSample is undefined");
-         }
-         if (trak.samples === void 0) {
-           throw new Error("trak.samples is undefined");
-         }
-         while (trak.nextSample < trak.samples.length) {
-           let result = void 0;
-           const fragTrakNextSample = trak.samples[trak.nextSample];
-           if (fragTrakNextSample) {
-             fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
-           }
-           try {
-             result = this.createFragment(
-               fragTrak.id,
-               trak.nextSample,
-               fragTrak.segmentStream
-             );
-           } catch (error) {
-             console.log("Failed to createFragment", error);
-           }
-           if (result) {
-             fragTrak.segmentStream = result;
-             trak.nextSample++;
-           } else {
-             finishedReading = await this.waitForMoreSamples();
-             break;
-           }
-           const nextSample = trak.samples[trak.nextSample];
-           const emitSegment = (
-             // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
-             fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
-             !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
-             // finished ||
-             // if we have more samples than the number of samples requested, we emit the fragment
-             trak.nextSample >= trak.samples.length
-           );
-           if (emitSegment) {
-             const trackInfoForFrag = trackInfo[fragTrak.id];
-             if (!trackInfoForFrag) {
-               throw new Error("trackInfoForFrag is undefined");
-             }
-             if (trak.nextSample >= trak.samples.length) {
-               trackInfoForFrag.complete = true;
-             }
-             const startSample = fragmentStartSamples[fragTrak.id];
-             const endSample = trak.samples[trak.nextSample - 1];
-             if (!startSample || !endSample) {
-               throw new Error("startSample or endSample is undefined");
-             }
-             yield {
-               track: fragTrak.id,
-               segment: trackInfoForFrag.index,
-               data: fragTrak.segmentStream.buffer,
-               complete: trackInfoForFrag.complete,
-               cts: startSample.cts,
-               dts: startSample.dts,
-               duration: endSample.cts - startSample.cts + endSample.duration
-             };
-             trackInfoForFrag.index += 1;
-             fragTrak.segmentStream = null;
-             delete fragmentStartSamples[fragTrak.id];
-           }
-         }
-       }
-       finishedReading = await this.waitForMoreSamples();
-     }
-   }
-   waitForMoreSamples() {
-     if (this._hasSeenLastSamples) {
-       return Promise.resolve(true);
-     }
-     return new Promise((resolve) => {
-       this.waitingForSamples.push(resolve);
-     });
-   }
-   processSamples(last) {
-     this._hasSeenLastSamples = last;
-     for (const observer of this.waitingForSamples) {
-       observer(last);
-     }
-     this.waitingForSamples = [];
-   }
- }
- export {
-   MP4File
- };
package/dist/lib/av/msToTimeCode.cjs
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const msToTimeCode = (ms, subSecond = false) => {
-   const seconds = Math.floor(ms / 1e3);
-   const minutes = Math.floor(seconds / 60);
-   const hours = Math.floor(minutes / 60);
-   const pad = (num) => num.toString().padStart(2, "0");
-   let timecode = `${pad(hours)}:${pad(minutes % 60)}:${pad(seconds % 60)}`;
-   if (subSecond) {
-     const subSeconds = Math.floor(ms % 1e3 / 10);
-     timecode += `.${subSeconds.toString().padStart(2, "0")}`;
-   }
-   return timecode;
- };
- exports.msToTimeCode = msToTimeCode;
package/dist/lib/av/msToTimeCode.js
@@ -1,15 +0,0 @@
- const msToTimeCode = (ms, subSecond = false) => {
-   const seconds = Math.floor(ms / 1e3);
-   const minutes = Math.floor(seconds / 60);
-   const hours = Math.floor(minutes / 60);
-   const pad = (num) => num.toString().padStart(2, "0");
-   let timecode = `${pad(hours)}:${pad(minutes % 60)}:${pad(seconds % 60)}`;
-   if (subSecond) {
-     const subSeconds = Math.floor(ms % 1e3 / 10);
-     timecode += `.${subSeconds.toString().padStart(2, "0")}`;
-   }
-   return timecode;
- };
- export {
-   msToTimeCode
- };
package/dist/lib/util/awaitMicrotask.cjs
@@ -1,8 +0,0 @@
- "use strict";
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const awaitMicrotask = async () => {
-   await new Promise((resolve) => {
-     queueMicrotask(resolve);
-   });
- };
- exports.awaitMicrotask = awaitMicrotask;
package/dist/lib/util/awaitMicrotask.js
@@ -1,8 +0,0 @@
- const awaitMicrotask = async () => {
-   await new Promise((resolve) => {
-     queueMicrotask(resolve);
-   });
- };
- export {
-   awaitMicrotask
- };
package/dist/lib/util/memoize.cjs
@@ -1,14 +0,0 @@
- "use strict";
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const memoize = (_target, _propertyKey, descriptor) => {
-   const get = descriptor.get;
-   if (!get) return;
-   const memoized = /* @__PURE__ */ new WeakMap();
-   descriptor.get = function() {
-     if (!memoized.has(this)) {
-       memoized.set(this, get.call(this));
-     }
-     return memoized.get(this);
-   };
- };
- exports.memoize = memoize;
package/dist/lib/util/memoize.js
@@ -1,14 +0,0 @@
- const memoize = (_target, _propertyKey, descriptor) => {
-   const get = descriptor.get;
-   if (!get) return;
-   const memoized = /* @__PURE__ */ new WeakMap();
-   descriptor.get = function() {
-     if (!memoized.has(this)) {
-       memoized.set(this, get.call(this));
-     }
-     return memoized.get(this);
-   };
- };
- export {
-   memoize
- };
package/dist/packages/elements/src/EF_FRAMEGEN.cjs
@@ -1,197 +0,0 @@
- "use strict";
- Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const task = require("@lit/task");
- const awaitMicrotask = require("../../../lib/util/awaitMicrotask.cjs");
- const EFTemporal = require("./elements/EFTemporal.cjs");
- const EFTimegroup = require("./elements/EFTimegroup.cjs");
- class TriggerCanvas {
-   constructor() {
-     this.canvas = document.createElement("canvas");
-     this.canvas.width = 1;
-     this.canvas.height = 1;
-     Object.assign(this.canvas.style, {
-       position: "fixed",
-       top: "0px",
-       left: "0px",
-       width: "1px",
-       height: "1px",
-       zIndex: "100000"
-     });
-     document.body.prepend(this.canvas);
-     const ctx = this.canvas.getContext("2d", { willReadFrequently: true });
-     if (!ctx) throw new Error("Canvas 2d context not ready");
-     this.ctx = ctx;
-     this.ctx.fillStyle = "black";
-   }
-   trigger() {
-     console.log("TRIGGERING CANVAS");
-     this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
-   }
- }
- class EfFramegen {
-   constructor() {
-     this.time = 0;
-     this.frameDurationMs = 0;
-     this.initialBusyTasks = Promise.resolve([]);
-     this.frameBox = document.createElement("div");
-     this.BRIDGE = window.FRAMEGEN_BRIDGE;
-     this.triggerCanvas = new TriggerCanvas();
-     if (this.BRIDGE) {
-       this.connectToBridge();
-     }
-   }
-   log(...args) {
-     console.log("[EF_FRAMEGEN]", ...args);
-   }
-   trace(...args) {
-     console.trace("[EF_FRAMEGEN]", ...args);
-   }
-   connectToBridge() {
-     const BRIDGE = this.BRIDGE;
-     if (!BRIDGE) {
-       throw new Error("No BRIDGE when attempting to connect to bridge");
-     }
-     BRIDGE.onInitialize(async (renderId, renderOptions) => {
-       this.log("BRIDGE.onInitialize", renderId, renderOptions);
-       await this.initialize(renderId, renderOptions);
-       BRIDGE.initialized(renderId);
-     });
-     BRIDGE.onBeginFrame((renderId, frameNumber, isLast) => {
-       this.log("BRIDGE.onBeginFrame", renderId, frameNumber, isLast);
-       this.beginFrame(renderId, frameNumber, isLast);
-     });
-   }
-   async initialize(renderId, renderOptions) {
-     addEventListener("unhandledrejection", (event) => {
-       this.trace("Unhandled rejection:", event.reason);
-       if (this.BRIDGE) {
-         this.BRIDGE.error(renderId, event.reason);
-       }
-     });
-     addEventListener("error", (event) => {
-       this.trace("Uncaught error", event.error);
-       if (this.BRIDGE) {
-         this.BRIDGE.error(renderId, event.error);
-       }
-     });
-     this.renderOptions = renderOptions;
-     const workbench = document.querySelector("ef-workbench");
-     if (!workbench) {
-       throw new Error("No workbench found");
-     }
-     workbench.rendering = true;
-     const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
-     const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
-     const firstGroup = timegroups[0];
-     if (!firstGroup) {
-       throw new Error("No temporal elements found");
-     }
-     firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs;
-     this.frameDurationMs = 1e3 / renderOptions.encoderOptions.video.framerate;
-     this.initialBusyTasks = Promise.all(
-       temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => temporal.frameTask).map((task2) => task2.taskComplete)
-     );
-     this.time = 0;
-     if (renderOptions.showFrameBox) {
-       Object.assign(this.frameBox.style, {
-         width: "200px",
-         height: "100px",
-         font: "30px Arial",
-         backgroundColor: "white",
-         position: "absolute",
-         top: "0px",
-         left: "0px",
-         zIndex: "100000"
-       });
-       document.body.prepend(this.frameBox);
-     }
-     this.audioBufferPromise = firstGroup.renderAudio(
-       renderOptions.encoderOptions.alignedFromUs / 1e3,
-       renderOptions.encoderOptions.alignedToUs / 1e3
-       // renderOptions.encoderOptions.fromMs,
-       // renderOptions.encoderOptions.toMs,
-     );
-     this.log("Initialized");
-   }
-   async beginFrame(renderId, frameNumber, isLast) {
-     if (this.renderOptions === void 0) {
-       throw new Error("No renderOptions");
-     }
-     if (this.renderOptions.showFrameBox) {
-       this.frameBox.innerHTML = `
-         <div>Frame #${frameNumber}</div>
-         <div>${this.time.toFixed(4)}</div>
-       `;
-     }
-     const workbench = document.querySelector("ef-workbench");
-     if (!workbench) {
-       throw new Error("No workbench found");
-     }
-     workbench.rendering = true;
-     const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
-     const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
-     const firstGroup = timegroups[0];
-     if (!firstGroup) {
-       throw new Error("No temporal elements found");
-     }
-     this.time = this.renderOptions.encoderOptions.fromMs + frameNumber * this.frameDurationMs;
-     firstGroup.currentTimeMs = this.time;
-     await this.initialBusyTasks;
-     await awaitMicrotask.awaitMicrotask();
-     const now = performance.now();
-     await Promise.all(
-       temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => {
-         return temporal.frameTask;
-       }).map((task2) => task2.taskComplete)
-     );
-     console.log(
-       `frame:${frameNumber} All tasks complete ${performance.now() - now}ms`
-     );
-     if (isLast && this.audioBufferPromise) {
-       const renderedAudio = await this.audioBufferPromise;
-       const channelCount = renderedAudio.numberOfChannels;
-       const interleavedSamples = new Float32Array(
-         channelCount * renderedAudio.length
-       );
-       for (let i = 0; i < renderedAudio.length; i++) {
-         for (let j = 0; j < channelCount; j++) {
-           interleavedSamples.set(
-             renderedAudio.getChannelData(j).slice(i, i + 1),
-             i * channelCount + j
-           );
-         }
-       }
-       if (this.BRIDGE) {
-         this.triggerCanvas.trigger();
-         this.BRIDGE.frameReady(
-           renderId,
-           frameNumber,
-           interleavedSamples.buffer
-         );
-       } else {
-         const fileReader = new FileReader();
-         fileReader.readAsDataURL(new Blob([interleavedSamples.buffer]));
-         await new Promise((resolve, reject) => {
-           fileReader.onload = resolve;
-           fileReader.onerror = reject;
-         });
-         return fileReader.result;
-       }
-     } else {
-       if (this.BRIDGE) {
-         this.triggerCanvas.trigger();
-         this.BRIDGE.frameReady(renderId, frameNumber, new ArrayBuffer(0));
-       } else {
-         const fileReader = new FileReader();
-         fileReader.readAsDataURL(new Blob([]));
-         await new Promise((resolve, reject) => {
-           fileReader.onload = resolve;
-           fileReader.onerror = reject;
-         });
-         return fileReader.result;
-       }
-     }
-   }
- }
- window.EF_FRAMEGEN = new EfFramegen();
- exports.EfFramegen = EfFramegen;
package/dist/packages/elements/src/EF_FRAMEGEN.d.ts
@@ -1,45 +0,0 @@
- import { VideoRenderOptions } from '../../assets';
-
- declare global {
-   interface Window {
-     EF_FRAMEGEN?: EfFramegen;
-     FRAMEGEN_BRIDGE?: {
-       onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
-       initialized(renderId: string): void;
-       onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
-       onTriggerCanvas(callback: () => void): void;
-       frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
-       error(renderId: string, error: Error): void;
-     };
-   }
- }
- declare class TriggerCanvas {
-   private canvas;
-   private ctx;
-   constructor();
-   trigger(): void;
- }
- export declare class EfFramegen {
-   time: number;
-   frameDurationMs: number;
-   initialBusyTasks: Promise<unknown[]>;
-   audioBufferPromise?: Promise<AudioBuffer>;
-   renderOptions?: VideoRenderOptions;
-   frameBox: HTMLDivElement;
-   BRIDGE: {
-     onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
-     initialized(renderId: string): void;
-     onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
-     onTriggerCanvas(callback: () => void): void;
-     frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
-     error(renderId: string, error: Error): void;
-   } | undefined;
-   triggerCanvas: TriggerCanvas;
-   log(...args: any[]): void;
-   trace(...args: any[]): void;
-   constructor();
-   connectToBridge(): void;
-   initialize(renderId: string, renderOptions: VideoRenderOptions): Promise<void>;
-   beginFrame(renderId: string, frameNumber: number, isLast: boolean): Promise<string | ArrayBuffer | null | undefined>;
- }
- export {};