@editframe/elements 0.6.0-beta.9 → 0.7.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/dist/lib/av/EncodedAsset.cjs +577 -0
  2. package/dist/lib/av/EncodedAsset.js +560 -0
  3. package/dist/lib/av/MP4File.cjs +187 -0
  4. package/dist/lib/av/MP4File.js +170 -0
  5. package/dist/lib/av/msToTimeCode.cjs +15 -0
  6. package/dist/lib/av/msToTimeCode.js +15 -0
  7. package/dist/lib/util/awaitMicrotask.cjs +4 -0
  8. package/dist/lib/util/awaitMicrotask.js +4 -0
  9. package/dist/lib/util/memoize.cjs +14 -0
  10. package/dist/lib/util/memoize.js +14 -0
  11. package/dist/packages/elements/src/EF_FRAMEGEN.cjs +197 -0
  12. package/dist/packages/elements/src/EF_FRAMEGEN.d.ts +44 -0
  13. package/dist/packages/elements/src/EF_FRAMEGEN.js +197 -0
  14. package/dist/packages/elements/src/EF_INTERACTIVE.cjs +4 -0
  15. package/dist/packages/elements/src/EF_INTERACTIVE.d.ts +1 -0
  16. package/dist/packages/elements/src/EF_INTERACTIVE.js +4 -0
  17. package/dist/packages/elements/src/elements/CrossUpdateController.cjs +16 -0
  18. package/dist/packages/elements/src/elements/CrossUpdateController.d.ts +9 -0
  19. package/dist/packages/elements/src/elements/CrossUpdateController.js +16 -0
  20. package/dist/packages/elements/src/elements/EFAudio.cjs +53 -0
  21. package/dist/packages/elements/src/elements/EFAudio.d.ts +10 -0
  22. package/dist/packages/elements/src/elements/EFAudio.js +54 -0
  23. package/dist/packages/elements/src/elements/EFCaptions.cjs +164 -0
  24. package/dist/packages/elements/src/elements/EFCaptions.d.ts +38 -0
  25. package/dist/packages/elements/src/elements/EFCaptions.js +166 -0
  26. package/dist/packages/elements/src/elements/EFImage.cjs +79 -0
  27. package/dist/packages/elements/src/elements/EFImage.d.ts +14 -0
  28. package/dist/packages/elements/src/elements/EFImage.js +80 -0
  29. package/dist/packages/elements/src/elements/EFMedia.cjs +336 -0
  30. package/dist/packages/elements/src/elements/EFMedia.d.ts +61 -0
  31. package/dist/packages/elements/src/elements/EFMedia.js +336 -0
  32. package/dist/packages/elements/src/elements/EFSourceMixin.cjs +55 -0
  33. package/dist/packages/elements/src/elements/EFSourceMixin.d.ts +12 -0
  34. package/dist/packages/elements/src/elements/EFSourceMixin.js +55 -0
  35. package/dist/packages/elements/src/elements/EFTemporal.cjs +199 -0
  36. package/dist/packages/elements/src/elements/EFTemporal.d.ts +38 -0
  37. package/dist/packages/elements/src/elements/EFTemporal.js +199 -0
  38. package/dist/packages/elements/src/elements/EFTimegroup.browsertest.d.ts +12 -0
  39. package/dist/packages/elements/src/elements/EFTimegroup.cjs +352 -0
  40. package/dist/packages/elements/src/elements/EFTimegroup.d.ts +39 -0
  41. package/dist/packages/elements/src/elements/EFTimegroup.js +353 -0
  42. package/dist/packages/elements/src/elements/EFVideo.cjs +109 -0
  43. package/dist/packages/elements/src/elements/EFVideo.d.ts +14 -0
  44. package/dist/packages/elements/src/elements/EFVideo.js +110 -0
  45. package/dist/packages/elements/src/elements/EFWaveform.cjs +242 -0
  46. package/dist/packages/elements/src/elements/EFWaveform.d.ts +30 -0
  47. package/dist/packages/elements/src/elements/EFWaveform.js +226 -0
  48. package/dist/packages/elements/src/elements/FetchMixin.cjs +28 -0
  49. package/dist/packages/elements/src/elements/FetchMixin.d.ts +8 -0
  50. package/dist/packages/elements/src/elements/FetchMixin.js +28 -0
  51. package/dist/packages/elements/src/elements/TimegroupController.cjs +20 -0
  52. package/dist/packages/elements/src/elements/TimegroupController.d.ts +14 -0
  53. package/dist/packages/elements/src/elements/TimegroupController.js +20 -0
  54. package/dist/packages/elements/src/elements/durationConverter.cjs +8 -0
  55. package/dist/packages/elements/src/elements/durationConverter.d.ts +4 -0
  56. package/dist/packages/elements/src/elements/durationConverter.js +8 -0
  57. package/dist/packages/elements/src/elements/parseTimeToMs.cjs +12 -0
  58. package/dist/packages/elements/src/elements/parseTimeToMs.d.ts +1 -0
  59. package/dist/packages/elements/src/elements/parseTimeToMs.js +12 -0
  60. package/dist/packages/elements/src/elements/util.cjs +11 -0
  61. package/dist/packages/elements/src/elements/util.d.ts +4 -0
  62. package/dist/packages/elements/src/elements/util.js +11 -0
  63. package/dist/packages/elements/src/gui/EFFilmstrip.cjs +825 -0
  64. package/dist/packages/elements/src/gui/EFFilmstrip.d.ts +147 -0
  65. package/dist/packages/elements/src/gui/EFFilmstrip.js +833 -0
  66. package/dist/packages/elements/src/gui/EFWorkbench.cjs +214 -0
  67. package/dist/packages/elements/src/gui/EFWorkbench.d.ts +45 -0
  68. package/dist/packages/elements/src/gui/EFWorkbench.js +215 -0
  69. package/dist/packages/elements/src/gui/TWMixin.cjs +28 -0
  70. package/dist/packages/elements/src/gui/TWMixin.css.cjs +3 -0
  71. package/dist/packages/elements/src/gui/TWMixin.css.js +4 -0
  72. package/dist/packages/elements/src/gui/TWMixin.d.ts +3 -0
  73. package/dist/packages/elements/src/gui/TWMixin.js +28 -0
  74. package/dist/packages/elements/src/index.cjs +50 -0
  75. package/dist/packages/elements/src/index.d.ts +10 -0
  76. package/dist/packages/elements/src/index.js +23 -0
  77. package/dist/style.css +791 -0
  78. package/package.json +14 -8
  79. package/src/elements/CrossUpdateController.ts +22 -0
  80. package/src/elements/EFAudio.ts +40 -0
  81. package/src/elements/EFCaptions.ts +188 -0
  82. package/src/elements/EFImage.ts +68 -0
  83. package/src/elements/EFMedia.ts +389 -0
  84. package/src/elements/EFSourceMixin.ts +57 -0
  85. package/src/elements/EFTemporal.ts +234 -0
  86. package/src/elements/EFTimegroup.browsertest.ts +333 -0
  87. package/src/elements/EFTimegroup.ts +393 -0
  88. package/src/elements/EFVideo.ts +103 -0
  89. package/src/elements/EFWaveform.ts +417 -0
  90. package/src/elements/FetchMixin.ts +19 -0
  91. package/src/elements/TimegroupController.ts +25 -0
  92. package/src/elements/durationConverter.ts +6 -0
  93. package/src/elements/parseTimeToMs.ts +9 -0
  94. package/src/elements/util.ts +24 -0
  95. package/src/gui/EFFilmstrip.ts +884 -0
  96. package/src/gui/EFWorkbench.ts +233 -0
  97. package/src/gui/TWMixin.css +3 -0
  98. package/src/gui/TWMixin.ts +30 -0
@@ -0,0 +1,187 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
+ const MP4Box = require("mp4box");
4
+ const debug = require("debug");
5
// Wrap a CommonJS export in a frozen, Module-tagged namespace object —
// the shape a bundler produces for `import * as ns from "..."` interop.
// Named properties become live getters onto the source object; the source
// itself is exposed as `default`.
function _interopNamespaceDefault(e) {
  const ns = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
  if (e) {
    for (const key in e) {
      if (key === "default") continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      Object.defineProperty(
        ns,
        key,
        desc.get ? desc : { enumerable: true, get: () => e[key] }
      );
    }
  }
  ns.default = e;
  return Object.freeze(ns);
}
21
+ const MP4Box__namespace = /* @__PURE__ */ _interopNamespaceDefault(MP4Box);
22
+ const log = debug("ef:av:mp4file");
23
// mp4box.js ISOFile subclass that exposes pull-based, async access to
// fragmented-MP4 segments, one stream of segments per track.
class MP4File extends MP4Box__namespace.ISOFile {
  constructor() {
    super(...arguments);
    // Resolves once mp4box has parsed enough of the file to fire onReady;
    // rejects if mp4box reports a parse error.
    this.readyPromise = new Promise((resolve, reject) => {
      this.onReady = () => resolve();
      this.onError = reject;
    });
    // Resolvers parked by waitForMoreSamples(); released in processSamples().
    this.waitingForSamples = [];
    // Set once mp4box signals the final batch of samples has been processed.
    this._hasSeenLastSamples = false;
    this._arrayBufferFileStart = 0;
  }
  // Register a track for fragmentation (replaces mp4box's own method so we
  // control the defaults: 1000 samples per fragment, RAP-aligned).
  // No-op when the track id is unknown.
  setSegmentOptions(id, user, options) {
    const trak = this.getTrackById(id);
    if (trak) {
      trak.nextSample = 0;
      this.fragmentedTracks.push({
        id,
        user,
        trak,
        segmentStream: null,
        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
      });
    }
  }
  /**
   * Fragments all tracks in a file into separate array buffers.
   *
   * Returns a map of track id -> array of segment ArrayBuffers (the init
   * segment followed by media fragments, in yield order).
   */
  async fragmentAllTracks() {
    const trackBuffers = {};
    for await (const segment of this.fragmentIterator()) {
      (trackBuffers[segment.track] ??= []).push(segment.data);
    }
    return trackBuffers;
  }
  // Async generator driving the fragmentation. Yields one "init" segment per
  // track, then media fragments as { track, segment, data, complete, cts,
  // dts, duration }. Suspends via waitForMoreSamples() whenever mp4box has
  // not yet buffered enough samples to build the next fragment.
  async *fragmentIterator() {
    await this.readyPromise;
    // Per-track bookkeeping: next fragment index and completion flag.
    const trackInfo = {};
    // Video tracks: fragment boundaries aligned to random-access points.
    for (const videoTrack of this.getInfo().videoTracks) {
      trackInfo[videoTrack.id] = { index: 0, complete: false };
      this.setSegmentOptions(videoTrack.id, null, {
        rapAlignement: true
      });
    }
    // Audio tracks: aim for ~4 seconds of samples per fragment, assuming
    // the common 1024-frames-per-packet layout (AAC-style) — heuristic only.
    for (const audioTrack of this.getInfo().audioTracks) {
      trackInfo[audioTrack.id] = { index: 0, complete: false };
      const sampleRate = audioTrack.audio.sample_rate;
      const probablePacketSize = 1024;
      const probableFourSecondsOfSamples = Math.ceil(
        sampleRate / probablePacketSize * 4
      );
      this.setSegmentOptions(audioTrack.id, null, {
        nbSamples: probableFourSecondsOfSamples
      });
    }
    // Emit each track's initialization segment before any media data.
    const initSegments = this.initializeSegmentation();
    for (const initSegment of initSegments) {
      yield {
        track: initSegment.id,
        segment: "init",
        data: initSegment.buffer,
        complete: false
      };
    }
    // First sample of the fragment currently being accumulated, per track;
    // used to compute the fragment's cts/dts/duration when it is emitted.
    const fragmentStartSamples = {};
    let finishedReading = false;
    const allTracksFinished = () => {
      for (const fragmentedTrack of this.fragmentedTracks) {
        if (!trackInfo[fragmentedTrack.id]?.complete) {
          return false;
        }
      }
      return true;
    };
    // Keep draining until mp4box has delivered the last samples AND every
    // track has emitted its final fragment.
    while (!(finishedReading && allTracksFinished())) {
      for (const fragTrak of this.fragmentedTracks) {
        const trak = fragTrak.trak;
        if (trak.nextSample === void 0) {
          throw new Error("trak.nextSample is undefined");
        }
        if (trak.samples === void 0) {
          throw new Error("trak.samples is undefined");
        }
        while (trak.nextSample < trak.samples.length) {
          let result = void 0;
          // Remember the first sample of a new fragment (||= only assigns
          // when no start sample is recorded yet for this track).
          const fragTrakNextSample = trak.samples[trak.nextSample];
          if (fragTrakNextSample) {
            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
          }
          try {
            result = this.createFragment(
              fragTrak.id,
              trak.nextSample,
              fragTrak.segmentStream
            );
          } catch (error) {
            // Deliberate best-effort: a failed fragment append falls through
            // to the "wait for more samples" branch below.
            console.error("Failed to createFragment", error);
          }
          if (result) {
            fragTrak.segmentStream = result;
            trak.nextSample++;
          } else {
            // createFragment could not produce output (sample data not
            // buffered yet) — park until mp4box feeds more samples.
            finishedReading = await this.waitForMoreSamples();
            break;
          }
          const nextSample = trak.samples[trak.nextSample];
          const emitSegment = (
            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
            // finished ||
            // if we have more samples than the number of samples requested, we emit the fragment
            trak.nextSample >= trak.samples.length
          );
          if (emitSegment) {
            const trackInfoForFrag = trackInfo[fragTrak.id];
            if (!trackInfoForFrag) {
              throw new Error("trackInfoForFrag is undefined");
            }
            if (trak.nextSample >= trak.samples.length) {
              trackInfoForFrag.complete = true;
            }
            log(
              `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`
            );
            const startSample = fragmentStartSamples[fragTrak.id];
            const endSample = trak.samples[trak.nextSample - 1];
            if (!startSample || !endSample) {
              throw new Error("startSample or endSample is undefined");
            }
            yield {
              track: fragTrak.id,
              segment: trackInfoForFrag.index,
              data: fragTrak.segmentStream.buffer,
              complete: trackInfoForFrag.complete,
              cts: startSample.cts,
              dts: startSample.dts,
              // Span from first sample's cts through the end of the last one.
              duration: endSample.cts - startSample.cts + endSample.duration
            };
            trackInfoForFrag.index += 1;
            // Reset per-fragment state for this track.
            fragTrak.segmentStream = null;
            delete fragmentStartSamples[fragTrak.id];
          }
        }
      }
      finishedReading = await this.waitForMoreSamples();
    }
  }
  // Resolves with `true` once the last samples have been seen, otherwise
  // parks the caller until the next processSamples() call. Resolution value
  // is the `last` flag passed to processSamples().
  waitForMoreSamples() {
    if (this._hasSeenLastSamples) {
      return Promise.resolve(true);
    }
    return new Promise((resolve) => {
      this.waitingForSamples.push(resolve);
    });
  }
  // mp4box callback hook: wakes every parked waitForMoreSamples() promise.
  processSamples(last) {
    this._hasSeenLastSamples = last;
    for (const observer of this.waitingForSamples) {
      observer(last);
    }
    this.waitingForSamples = [];
  }
}
187
+ exports.MP4File = MP4File;
@@ -0,0 +1,170 @@
1
+ import * as MP4Box from "mp4box";
2
+ import debug from "debug";
3
+ const log = debug("ef:av:mp4file");
4
// mp4box.js ISOFile subclass that exposes pull-based, async access to
// fragmented-MP4 segments, one stream of segments per track.
class MP4File extends MP4Box.ISOFile {
  constructor() {
    super(...arguments);
    // Resolves once mp4box has parsed enough of the file to fire onReady;
    // rejects if mp4box reports a parse error.
    this.readyPromise = new Promise((resolve, reject) => {
      this.onReady = () => resolve();
      this.onError = reject;
    });
    // Resolvers parked by waitForMoreSamples(); released in processSamples().
    this.waitingForSamples = [];
    // Set once mp4box signals the final batch of samples has been processed.
    this._hasSeenLastSamples = false;
    this._arrayBufferFileStart = 0;
  }
  // Register a track for fragmentation (replaces mp4box's own method so we
  // control the defaults: 1000 samples per fragment, RAP-aligned).
  // No-op when the track id is unknown.
  setSegmentOptions(id, user, options) {
    const trak = this.getTrackById(id);
    if (trak) {
      trak.nextSample = 0;
      this.fragmentedTracks.push({
        id,
        user,
        trak,
        segmentStream: null,
        nb_samples: "nbSamples" in options && options.nbSamples || 1e3,
        rapAlignement: ("rapAlignement" in options && options.rapAlignement) ?? true
      });
    }
  }
  /**
   * Fragments all tracks in a file into separate array buffers.
   *
   * Returns a map of track id -> array of segment ArrayBuffers (the init
   * segment followed by media fragments, in yield order).
   */
  async fragmentAllTracks() {
    const trackBuffers = {};
    for await (const segment of this.fragmentIterator()) {
      (trackBuffers[segment.track] ??= []).push(segment.data);
    }
    return trackBuffers;
  }
  // Async generator driving the fragmentation. Yields one "init" segment per
  // track, then media fragments as { track, segment, data, complete, cts,
  // dts, duration }. Suspends via waitForMoreSamples() whenever mp4box has
  // not yet buffered enough samples to build the next fragment.
  async *fragmentIterator() {
    await this.readyPromise;
    // Per-track bookkeeping: next fragment index and completion flag.
    const trackInfo = {};
    // Video tracks: fragment boundaries aligned to random-access points.
    for (const videoTrack of this.getInfo().videoTracks) {
      trackInfo[videoTrack.id] = { index: 0, complete: false };
      this.setSegmentOptions(videoTrack.id, null, {
        rapAlignement: true
      });
    }
    // Audio tracks: aim for ~4 seconds of samples per fragment, assuming
    // the common 1024-frames-per-packet layout (AAC-style) — heuristic only.
    for (const audioTrack of this.getInfo().audioTracks) {
      trackInfo[audioTrack.id] = { index: 0, complete: false };
      const sampleRate = audioTrack.audio.sample_rate;
      const probablePacketSize = 1024;
      const probableFourSecondsOfSamples = Math.ceil(
        sampleRate / probablePacketSize * 4
      );
      this.setSegmentOptions(audioTrack.id, null, {
        nbSamples: probableFourSecondsOfSamples
      });
    }
    // Emit each track's initialization segment before any media data.
    const initSegments = this.initializeSegmentation();
    for (const initSegment of initSegments) {
      yield {
        track: initSegment.id,
        segment: "init",
        data: initSegment.buffer,
        complete: false
      };
    }
    // First sample of the fragment currently being accumulated, per track;
    // used to compute the fragment's cts/dts/duration when it is emitted.
    const fragmentStartSamples = {};
    let finishedReading = false;
    const allTracksFinished = () => {
      for (const fragmentedTrack of this.fragmentedTracks) {
        if (!trackInfo[fragmentedTrack.id]?.complete) {
          return false;
        }
      }
      return true;
    };
    // Keep draining until mp4box has delivered the last samples AND every
    // track has emitted its final fragment.
    while (!(finishedReading && allTracksFinished())) {
      for (const fragTrak of this.fragmentedTracks) {
        const trak = fragTrak.trak;
        if (trak.nextSample === void 0) {
          throw new Error("trak.nextSample is undefined");
        }
        if (trak.samples === void 0) {
          throw new Error("trak.samples is undefined");
        }
        while (trak.nextSample < trak.samples.length) {
          let result = void 0;
          // Remember the first sample of a new fragment (||= only assigns
          // when no start sample is recorded yet for this track).
          const fragTrakNextSample = trak.samples[trak.nextSample];
          if (fragTrakNextSample) {
            fragmentStartSamples[fragTrak.id] ||= fragTrakNextSample;
          }
          try {
            result = this.createFragment(
              fragTrak.id,
              trak.nextSample,
              fragTrak.segmentStream
            );
          } catch (error) {
            // Deliberate best-effort: a failed fragment append falls through
            // to the "wait for more samples" branch below.
            console.error("Failed to createFragment", error);
          }
          if (result) {
            fragTrak.segmentStream = result;
            trak.nextSample++;
          } else {
            // createFragment could not produce output (sample data not
            // buffered yet) — park until mp4box feeds more samples.
            finishedReading = await this.waitForMoreSamples();
            break;
          }
          const nextSample = trak.samples[trak.nextSample];
          const emitSegment = (
            // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
            fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
            !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // // if this is the last sample, we emit the fragment
            // finished ||
            // if we have more samples than the number of samples requested, we emit the fragment
            trak.nextSample >= trak.samples.length
          );
          if (emitSegment) {
            const trackInfoForFrag = trackInfo[fragTrak.id];
            if (!trackInfoForFrag) {
              throw new Error("trackInfoForFrag is undefined");
            }
            if (trak.nextSample >= trak.samples.length) {
              trackInfoForFrag.complete = true;
            }
            log(
              `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`
            );
            const startSample = fragmentStartSamples[fragTrak.id];
            const endSample = trak.samples[trak.nextSample - 1];
            if (!startSample || !endSample) {
              throw new Error("startSample or endSample is undefined");
            }
            yield {
              track: fragTrak.id,
              segment: trackInfoForFrag.index,
              data: fragTrak.segmentStream.buffer,
              complete: trackInfoForFrag.complete,
              cts: startSample.cts,
              dts: startSample.dts,
              // Span from first sample's cts through the end of the last one.
              duration: endSample.cts - startSample.cts + endSample.duration
            };
            trackInfoForFrag.index += 1;
            // Reset per-fragment state for this track.
            fragTrak.segmentStream = null;
            delete fragmentStartSamples[fragTrak.id];
          }
        }
      }
      finishedReading = await this.waitForMoreSamples();
    }
  }
  // Resolves with `true` once the last samples have been seen, otherwise
  // parks the caller until the next processSamples() call. Resolution value
  // is the `last` flag passed to processSamples().
  waitForMoreSamples() {
    if (this._hasSeenLastSamples) {
      return Promise.resolve(true);
    }
    return new Promise((resolve) => {
      this.waitingForSamples.push(resolve);
    });
  }
  // mp4box callback hook: wakes every parked waitForMoreSamples() promise.
  processSamples(last) {
    this._hasSeenLastSamples = last;
    for (const observer of this.waitingForSamples) {
      observer(last);
    }
    this.waitingForSamples = [];
  }
}
168
+ export {
169
+ MP4File
170
+ };
@@ -0,0 +1,15 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
/**
 * Format a millisecond duration as an "HH:MM:SS" timecode, optionally with
 * a two-digit centisecond suffix (e.g. "00:01:05.23").
 */
const msToTimeCode = (ms, subSecond = false) => {
  const totalSeconds = Math.floor(ms / 1000);
  const totalMinutes = Math.floor(totalSeconds / 60);
  const totalHours = Math.floor(totalMinutes / 60);
  const two = (n) => String(n).padStart(2, "0");
  let timecode = [two(totalHours), two(totalMinutes % 60), two(totalSeconds % 60)].join(":");
  if (subSecond) {
    const centiseconds = Math.floor((ms % 1000) / 10);
    timecode += `.${two(centiseconds)}`;
  }
  return timecode;
};
15
+ exports.msToTimeCode = msToTimeCode;
@@ -0,0 +1,15 @@
1
/**
 * Format a millisecond duration as an "HH:MM:SS" timecode, optionally with
 * a two-digit centisecond suffix (e.g. "00:01:05.23").
 */
const msToTimeCode = (ms, subSecond = false) => {
  const totalSeconds = Math.floor(ms / 1000);
  const totalMinutes = Math.floor(totalSeconds / 60);
  const totalHours = Math.floor(totalMinutes / 60);
  const two = (n) => String(n).padStart(2, "0");
  let timecode = [two(totalHours), two(totalMinutes % 60), two(totalSeconds % 60)].join(":");
  if (subSecond) {
    const centiseconds = Math.floor((ms % 1000) / 10);
    timecode += `.${two(centiseconds)}`;
  }
  return timecode;
};
13
+ export {
14
+ msToTimeCode
15
+ };
@@ -0,0 +1,4 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
// Returns a promise that resolves on the next microtask tick.
const awaitMicrotask = () => new Promise((resolve) => queueMicrotask(resolve));
4
+ exports.awaitMicrotask = awaitMicrotask;
@@ -0,0 +1,4 @@
1
// Returns a promise that resolves on the next microtask tick.
const awaitMicrotask = () => new Promise((resolve) => queueMicrotask(resolve));
2
+ export {
3
+ awaitMicrotask
4
+ };
@@ -0,0 +1,14 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
+ const memoize = (_target, _propertyKey, descriptor) => {
4
+ const get = descriptor.get;
5
+ if (!get) return;
6
+ const memoized = /* @__PURE__ */ new WeakMap();
7
+ descriptor.get = function() {
8
+ if (!memoized.has(this)) {
9
+ memoized.set(this, get.call(this));
10
+ }
11
+ return memoized.get(this);
12
+ };
13
+ };
14
+ exports.memoize = memoize;
@@ -0,0 +1,14 @@
1
+ const memoize = (_target, _propertyKey, descriptor) => {
2
+ const get = descriptor.get;
3
+ if (!get) return;
4
+ const memoized = /* @__PURE__ */ new WeakMap();
5
+ descriptor.get = function() {
6
+ if (!memoized.has(this)) {
7
+ memoized.set(this, get.call(this));
8
+ }
9
+ return memoized.get(this);
10
+ };
11
+ };
12
+ export {
13
+ memoize
14
+ };
@@ -0,0 +1,197 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
3
+ const debug = require("debug");
4
+ const task = require("@lit/task");
5
+ const awaitMicrotask = require("../../../lib/util/awaitMicrotask.cjs");
6
+ const EFTemporal = require("./elements/EFTemporal.cjs");
7
+ const EFTimegroup = require("./elements/EFTimegroup.cjs");
8
+ const log = debug("ef:elements:EF_FRAMEGEN");
9
// 1x1 fixed-position canvas prepended to <body>. Clearing it produces a
// paint that the renderer can observe — presumably used to signal frame
// completion to the capture side (TODO confirm against the bridge host).
class TriggerCanvas {
  constructor() {
    this.canvas = document.createElement("canvas");
    this.canvas.width = 1;
    this.canvas.height = 1;
    // Pin the canvas to the top-left corner, above other content.
    Object.assign(this.canvas.style, {
      position: "fixed",
      top: "0px",
      left: "0px",
      width: "1px",
      height: "1px",
      zIndex: "100000"
    });
    document.body.prepend(this.canvas);
    // willReadFrequently hints the browser to keep the bitmap CPU-readable.
    const ctx = this.canvas.getContext("2d", { willReadFrequently: true });
    if (!ctx) throw new Error("Canvas 2d context not ready");
    this.ctx = ctx;
    this.ctx.fillStyle = "black";
  }
  // Force a repaint of the trigger canvas by clearing it.
  trigger() {
    log("TRIGGERING CANVAS");
    this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
  }
}
33
// Page-side frame generator. Drives the <ef-workbench> timeline one frame
// at a time, coordinating with a host process through the optional
// window.FRAMEGEN_BRIDGE (initialize -> per-frame begin/ready handshake).
class EfFramegen {
  constructor() {
    // Current timeline position in ms for the frame being produced.
    this.time = 0;
    this.frameDurationMs = 0;
    // Frame tasks already in flight when rendering starts; awaited before
    // every frame so the first frames don't race initial loading.
    this.initialBusyTasks = Promise.resolve([]);
    // Optional on-screen debug overlay showing frame number and time.
    this.frameBox = document.createElement("div");
    this.BRIDGE = window.FRAMEGEN_BRIDGE;
    this.triggerCanvas = new TriggerCanvas();
    if (this.BRIDGE) {
      this.connectToBridge();
    }
  }
  // Log with a stack trace, prefixed for filtering in host logs.
  trace(...args) {
    console.trace("[EF_FRAMEGEN]", ...args);
  }
  // Wire bridge callbacks: host asks us to initialize, then to begin frames.
  connectToBridge() {
    const BRIDGE = this.BRIDGE;
    if (!BRIDGE) {
      throw new Error("No BRIDGE when attempting to connect to bridge");
    }
    BRIDGE.onInitialize(async (renderId, renderOptions) => {
      log("BRIDGE.onInitialize", renderId, renderOptions);
      await this.initialize(renderId, renderOptions);
      BRIDGE.initialized(renderId);
    });
    BRIDGE.onBeginFrame((renderId, frameNumber, isLast) => {
      log("BRIDGE.onBeginFrame", renderId, frameNumber, isLast);
      // Fire-and-forget: completion is reported via BRIDGE.frameReady.
      this.beginFrame(renderId, frameNumber, isLast);
    });
  }
  // One-time render setup: error forwarding, timeline seek to the start
  // position, debug overlay, and kicking off the offline audio render.
  async initialize(renderId, renderOptions) {
    // Forward any uncaught failure to the host so the render can abort.
    addEventListener("unhandledrejection", (event) => {
      this.trace("Unhandled rejection:", event.reason);
      if (this.BRIDGE) {
        this.BRIDGE.error(renderId, event.reason);
      }
    });
    addEventListener("error", (event) => {
      this.trace("Uncaught error", event.error);
      if (this.BRIDGE) {
        this.BRIDGE.error(renderId, event.error);
      }
    });
    this.renderOptions = renderOptions;
    const workbench = document.querySelector("ef-workbench");
    if (!workbench) {
      throw new Error("No workbench found");
    }
    workbench.rendering = true;
    const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
    const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
    // Only the first top-level timegroup is driven; presumably it controls
    // the whole timeline — TODO confirm multi-group behavior.
    const firstGroup = timegroups[0];
    if (!firstGroup) {
      throw new Error("No temporal elements found");
    }
    firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs;
    this.frameDurationMs = 1e3 / renderOptions.encoderOptions.video.framerate;
    // Snapshot every not-yet-complete frame task so beginFrame can await it.
    this.initialBusyTasks = Promise.all(
      temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => temporal.frameTask).map((task2) => task2.taskComplete)
    );
    this.time = 0;
    if (renderOptions.showFrameBox) {
      Object.assign(this.frameBox.style, {
        width: "200px",
        height: "100px",
        font: "30px Arial",
        backgroundColor: "white",
        position: "absolute",
        top: "0px",
        left: "0px",
        zIndex: "100000"
      });
      document.body.prepend(this.frameBox);
    }
    // Start the offline audio render now; awaited on the last frame.
    // alignedFromUs/alignedToUs are microseconds, converted to ms here.
    this.audioBufferPromise = firstGroup.renderAudio(
      renderOptions.encoderOptions.alignedFromUs / 1e3,
      renderOptions.encoderOptions.alignedToUs / 1e3
      // renderOptions.encoderOptions.fromMs,
      // renderOptions.encoderOptions.toMs,
    );
    log("Initialized");
  }
  // Produce one frame: seek the timeline, wait for all frame tasks, then
  // signal readiness (with interleaved audio on the last frame). Without a
  // bridge, returns the payload as a data URL instead.
  async beginFrame(renderId, frameNumber, isLast) {
    if (this.renderOptions === void 0) {
      throw new Error("No renderOptions");
    }
    if (this.renderOptions.showFrameBox) {
      this.frameBox.innerHTML = `
      <div>Frame #${frameNumber}</div>
      <div>${this.time.toFixed(4)}</div>
      `;
    }
    const workbench = document.querySelector("ef-workbench");
    if (!workbench) {
      throw new Error("No workbench found");
    }
    workbench.rendering = true;
    const timegroups = EFTimegroup.shallowGetTimegroups(workbench);
    const temporals = EFTemporal.deepGetElementsWithFrameTasks(workbench);
    const firstGroup = timegroups[0];
    if (!firstGroup) {
      throw new Error("No temporal elements found");
    }
    // Seek the timeline to this frame's timestamp.
    this.time = this.renderOptions.encoderOptions.fromMs + frameNumber * this.frameDurationMs;
    firstGroup.currentTimeMs = this.time;
    log("Awaiting initialBusyTasks");
    await this.initialBusyTasks;
    // Let reactive updates triggered by the seek flush before sampling tasks.
    log("Awaiting microtask");
    await awaitMicrotask.awaitMicrotask();
    log("Awaiting frame tasks");
    const now = performance.now();
    await Promise.all(
      temporals.filter((temporal) => temporal.frameTask.status < task.TaskStatus.COMPLETE).map((temporal) => {
        return temporal.frameTask;
      }).map((task2) => task2.taskComplete)
    );
    log(`frame:${frameNumber} All tasks complete ${performance.now() - now}ms`);
    if (isLast && this.audioBufferPromise) {
      // Last frame carries the fully rendered, channel-interleaved audio.
      const renderedAudio = await this.audioBufferPromise;
      const channelCount = renderedAudio.numberOfChannels;
      const interleavedSamples = new Float32Array(
        channelCount * renderedAudio.length
      );
      // NOTE(review): allocates a one-element array per sample via slice()
      // and re-fetches getChannelData per iteration; a direct indexed copy
      // per channel would avoid this — confirm before optimizing.
      for (let i = 0; i < renderedAudio.length; i++) {
        for (let j = 0; j < channelCount; j++) {
          interleavedSamples.set(
            renderedAudio.getChannelData(j).slice(i, i + 1),
            i * channelCount + j
          );
        }
      }
      if (this.BRIDGE) {
        this.triggerCanvas.trigger();
        this.BRIDGE.frameReady(
          renderId,
          frameNumber,
          interleavedSamples.buffer
        );
      } else {
        // No bridge (e.g. manual/debug use): hand back a data URL instead.
        const fileReader = new FileReader();
        fileReader.readAsDataURL(new Blob([interleavedSamples.buffer]));
        await new Promise((resolve, reject) => {
          fileReader.onload = resolve;
          fileReader.onerror = reject;
        });
        return fileReader.result;
      }
    } else {
      if (this.BRIDGE) {
        this.triggerCanvas.trigger();
        // Non-final frames carry no audio payload.
        this.BRIDGE.frameReady(renderId, frameNumber, new ArrayBuffer(0));
      } else {
        const fileReader = new FileReader();
        fileReader.readAsDataURL(new Blob([]));
        await new Promise((resolve, reject) => {
          fileReader.onload = resolve;
          fileReader.onerror = reject;
        });
        return fileReader.result;
      }
    }
  }
}
// Module side effect: expose a singleton for the host page / bridge.
window.EF_FRAMEGEN = new EfFramegen();
197
+ exports.EfFramegen = EfFramegen;
@@ -0,0 +1,44 @@
1
+ import { VideoRenderOptions } from '../../assets/src';
2
+
3
declare global {
  interface Window {
    /** Singleton frame generator installed as a module side effect. */
    EF_FRAMEGEN?: EfFramegen;
    /**
     * Host-injected bridge for the render handshake. Duplicated inline on
     * EfFramegen.BRIDGE below — keep the two shapes in sync.
     */
    FRAMEGEN_BRIDGE?: {
      onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
      initialized(renderId: string): void;
      onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
      onTriggerCanvas(callback: () => void): void;
      frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
      error(renderId: string, error: Error): void;
    };
  }
}
/** 1x1 canvas whose repaint signals frame completion to the capture side. */
declare class TriggerCanvas {
  private canvas;
  private ctx;
  constructor();
  trigger(): void;
}
/** Page-side frame generator driving the workbench timeline per frame. */
export declare class EfFramegen {
  /** Current timeline position (ms) of the frame being produced. */
  time: number;
  frameDurationMs: number;
  /** Frame tasks in flight at render start; awaited before each frame. */
  initialBusyTasks: Promise<unknown[]>;
  /** Offline audio render started in initialize(), consumed on last frame. */
  audioBufferPromise?: Promise<AudioBuffer>;
  renderOptions?: VideoRenderOptions;
  /** Optional on-screen debug overlay (frame number + time). */
  frameBox: HTMLDivElement;
  /** Snapshot of window.FRAMEGEN_BRIDGE taken at construction. */
  BRIDGE: {
    onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
    initialized(renderId: string): void;
    onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
    onTriggerCanvas(callback: () => void): void;
    frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
    error(renderId: string, error: Error): void;
  } | undefined;
  triggerCanvas: TriggerCanvas;
  trace(...args: any[]): void;
  constructor();
  connectToBridge(): void;
  initialize(renderId: string, renderOptions: VideoRenderOptions): Promise<void>;
  /** Resolves with a data URL only in bridgeless (debug) mode. */
  beginFrame(renderId: string, frameNumber: number, isLast: boolean): Promise<string | ArrayBuffer | null | undefined>;
}
export {};