@editframe/elements 0.5.0-beta.6 → 0.5.0-beta.8

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (53)
  1. package/dist/elements/src/EF_FRAMEGEN.mjs +130 -0
  2. package/dist/elements/src/EF_INTERACTIVE.mjs +4 -0
  3. package/dist/elements/{elements → src/elements}/EFAudio.mjs +20 -0
  4. package/dist/elements/{elements → src/elements}/EFCaptions.mjs +3 -0
  5. package/dist/elements/{elements → src/elements}/EFImage.mjs +15 -3
  6. package/dist/elements/{elements → src/elements}/EFMedia.mjs +81 -4
  7. package/dist/elements/{elements → src/elements}/EFTemporal.mjs +29 -1
  8. package/dist/elements/{elements → src/elements}/EFTimegroup.mjs +124 -0
  9. package/dist/elements/{elements → src/elements}/EFVideo.mjs +10 -0
  10. package/dist/elements/{elements → src/elements}/EFWaveform.mjs +41 -24
  11. package/dist/elements/{elements.mjs → src/elements.mjs} +2 -1
  12. package/dist/elements/{gui → src/gui}/EFFilmstrip.mjs +3 -2
  13. package/dist/elements/{gui → src/gui}/EFWorkbench.mjs +51 -63
  14. package/dist/elements/{gui → src/gui}/TWMixin.css.mjs +1 -1
  15. package/dist/style.css +3 -0
  16. package/dist/util/awaitAnimationFrame.mjs +11 -0
  17. package/docker-compose.yaml +17 -0
  18. package/package.json +2 -2
  19. package/src/EF_FRAMEGEN.ts +208 -0
  20. package/src/EF_INTERACTIVE.ts +2 -0
  21. package/src/elements/CrossUpdateController.ts +18 -0
  22. package/src/elements/EFAudio.ts +42 -0
  23. package/src/elements/EFCaptions.ts +202 -0
  24. package/src/elements/EFImage.ts +70 -0
  25. package/src/elements/EFMedia.ts +395 -0
  26. package/src/elements/EFSourceMixin.ts +57 -0
  27. package/src/elements/EFTemporal.ts +246 -0
  28. package/src/elements/EFTimegroup.browsertest.ts +360 -0
  29. package/src/elements/EFTimegroup.ts +394 -0
  30. package/src/elements/EFTimeline.ts +13 -0
  31. package/src/elements/EFVideo.ts +114 -0
  32. package/src/elements/EFWaveform.ts +407 -0
  33. package/src/elements/FetchMixin.ts +18 -0
  34. package/src/elements/TimegroupController.ts +25 -0
  35. package/src/elements/buildLitFixture.ts +13 -0
  36. package/src/elements/durationConverter.ts +6 -0
  37. package/src/elements/parseTimeToMs.ts +10 -0
  38. package/src/elements/util.ts +24 -0
  39. package/src/gui/EFFilmstrip.ts +702 -0
  40. package/src/gui/EFWorkbench.ts +242 -0
  41. package/src/gui/TWMixin.css +3 -0
  42. package/src/gui/TWMixin.ts +27 -0
  43. package/src/util.d.ts +1 -0
  44. package/dist/elements/elements.css.mjs +0 -1
  45. /package/dist/elements/{elements → src/elements}/CrossUpdateController.mjs +0 -0
  46. /package/dist/elements/{elements → src/elements}/EFSourceMixin.mjs +0 -0
  47. /package/dist/elements/{elements → src/elements}/EFTimeline.mjs +0 -0
  48. /package/dist/elements/{elements → src/elements}/FetchMixin.mjs +0 -0
  49. /package/dist/elements/{elements → src/elements}/TimegroupController.mjs +0 -0
  50. /package/dist/elements/{elements → src/elements}/durationConverter.mjs +0 -0
  51. /package/dist/elements/{elements → src/elements}/parseTimeToMs.mjs +0 -0
  52. /package/dist/elements/{elements → src/elements}/util.mjs +0 -0
  53. /package/dist/elements/{gui → src/gui}/TWMixin.mjs +0 -0
@@ -0,0 +1,130 @@
+ import { shallowGetTimegroups, deepGetElementsWithFrameTasks } from "./elements/EFTemporal.mjs";
+ import { awaitAnimationFrame } from "../../util/awaitAnimationFrame.mjs";
+ import { awaitMicrotask } from "../../util/awaitMicrotask.mjs";
+ class TriggerCanvas {
+ constructor() {
+ this.canvas = document.createElement("canvas");
+ this.canvas.width = 1;
+ this.canvas.height = 1;
+ Object.assign(this.canvas.style, {
+ position: "absolute",
+ top: "0px",
+ left: "0px",
+ width: `1px`,
+ height: `1px`,
+ zIndex: "100000"
+ });
+ document.body.prepend(this.canvas);
+ this.ctx = this.canvas.getContext("2d");
+ }
+ trigger() {
+ console.log("Triggering");
+ this.ctx.fillStyle = "rgba(50, 0, 0, .8)";
+ this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
+ this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
+ }
+ }
+ if (window.EF_FRAMEGEN !== void 0) {
+ const EF_FRAMEGEN = window.EF_FRAMEGEN;
+ EF_FRAMEGEN.onRender((renderId, traceCarrier, renderOptions) => {
+ const crashOnUnhandledError = (error) => {
+ EF_FRAMEGEN.error(renderId, error);
+ };
+ window.addEventListener("error", (error) => {
+ console.warn("Crashing due to unhandled error", error);
+ crashOnUnhandledError(error.message);
+ });
+ window.addEventListener("unhandledrejection", (error) => {
+ console.warn("Crashing due to unhandled rejection", error);
+ crashOnUnhandledError(error.reason);
+ });
+ const triggerCanvas = new TriggerCanvas();
+ EF_FRAMEGEN.onPaint(async () => {
+ triggerCanvas.trigger();
+ await awaitAnimationFrame();
+ await awaitAnimationFrame();
+ EF_FRAMEGEN.didPaint(renderId);
+ });
+ const workbench = document.querySelector("ef-workbench");
+ workbench.rendering = true;
+ const timegroups = shallowGetTimegroups(workbench);
+ const temporals = deepGetElementsWithFrameTasks(workbench);
+ const firstGroup = timegroups[0];
+ if (!firstGroup) {
+ throw new Error("No temporal elements found");
+ }
+ firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs;
+ const frameDurationMs = 1e3 / renderOptions.encoderOptions.video.framerate;
+ const initialBusyTasks = Promise.all(
+ temporals.map((temporal) => temporal.frameTask).map((task) => task.taskComplete)
+ );
+ const frameBox = document.createElement("div");
+ Object.assign(frameBox.style, {
+ width: "200px",
+ height: "100px",
+ font: "30px Arial",
+ backgroundColor: "white",
+ position: "absolute",
+ top: "0px",
+ left: "0px",
+ zIndex: "100000"
+ });
+ let time = 0;
+ console.log("onRender", renderId, traceCarrier, renderOptions);
+ const audioBufferPromise = firstGroup.renderAudio(
+ renderOptions.encoderOptions.alignedFromUs / 1e3,
+ renderOptions.encoderOptions.alignedToUs / 1e3
+ // renderOptions.encoderOptions.fromMs,
+ // renderOptions.encoderOptions.toMs,
+ );
+ EF_FRAMEGEN.onBegin(async (traceCarrier2, frame, isLast) => {
+ time = firstGroup.currentTimeMs = renderOptions.encoderOptions.fromMs + frame * frameDurationMs;
+ console.log("FRAME #", frame);
+ frameBox.innerHTML = `
+ <div>Frame #${frame}</div>
+ <div>${time.toFixed(4)}</div>
+ `;
+ await initialBusyTasks;
+ console.log("TIME", time.toFixed(4));
+ await awaitMicrotask();
+ console.log("After microtask");
+ const now = performance.now();
+ console.log(`frame:${frame} Awaiting busyTasks`);
+ await Promise.all(
+ temporals.map((temporal) => {
+ console.log(
+ "Awaiting",
+ temporal.tagName,
+ temporal.frameTask.status,
+ temporal.frameTask.taskComplete
+ );
+ return temporal.frameTask;
+ }).map((task) => task.taskComplete)
+ );
+ console.log(
+ `frame:${frame} All tasks complete ${performance.now() - now}ms`
+ );
+ await awaitAnimationFrame();
+ await awaitAnimationFrame();
+ triggerCanvas.trigger();
+ if (isLast) {
+ const renderedAudio = await audioBufferPromise;
+ const channelCount = renderedAudio.numberOfChannels;
+ const interleavedSamples = new Float32Array(
+ channelCount * renderedAudio.length
+ );
+ for (let i = 0; i < renderedAudio.length; i++) {
+ for (let j = 0; j < channelCount; j++) {
+ interleavedSamples.set(
+ renderedAudio.getChannelData(j).slice(i, i + 1),
+ i * channelCount + j
+ );
+ }
+ }
+ EF_FRAMEGEN.frameReady(renderId, frame, interleavedSamples.buffer);
+ } else {
+ EF_FRAMEGEN.frameReady(renderId, frame, new Float32Array(0).buffer);
+ }
+ });
+ });
+ }
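
In the `isLast` branch above, the rendered `AudioBuffer` is converted from the Web Audio API's planar layout (one `Float32Array` per channel) into a single interleaved buffer before being handed to `frameReady`. A minimal sketch of that conversion (the `interleave` helper is illustrative, not part of the package):

```js
// Convert a planar AudioBuffer (one Float32Array per channel) into one
// interleaved Float32Array: [L0, R0, L1, R1, ...] for a stereo buffer.
function interleave(audioBuffer) {
  const channelCount = audioBuffer.numberOfChannels;
  const channels = [];
  for (let j = 0; j < channelCount; j++) {
    channels.push(audioBuffer.getChannelData(j));
  }
  const interleaved = new Float32Array(channelCount * audioBuffer.length);
  for (let i = 0; i < audioBuffer.length; i++) {
    for (let j = 0; j < channelCount; j++) {
      interleaved[i * channelCount + j] = channels[j][i];
    }
  }
  return interleaved;
}
```

Writing samples directly, as here, avoids allocating a one-element array per sample the way `getChannelData(j).slice(i, i + 1)` does in the loop above.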
@@ -0,0 +1,4 @@
+ const EF_INTERACTIVE = !window.location.search.includes("EF_NONINTERACTIVE");
+ export {
+ EF_INTERACTIVE
+ };
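
Throughout the rest of this diff, the `EF_INTERACTIVE` flag defined above is passed as the `autoRun` option of `@lit/task` tasks, so loaders run automatically on interactive pages but stay idle (to be driven explicitly, e.g. via `run()` or by awaiting `taskComplete`) when the page is loaded with `EF_NONINTERACTIVE` for frame generation. A minimal sketch of that gating pattern, assuming a hypothetical host element (import path and task body are illustrative):

```js
import { LitElement } from "lit";
import { Task } from "@lit/task";
import { EF_INTERACTIVE } from "./EF_INTERACTIVE.mjs";

class DemoLoader extends LitElement {
  // Auto-runs whenever `src` changes, but only on interactive pages.
  loader = new Task(this, {
    autoRun: EF_INTERACTIVE,
    args: () => [this.src],
    task: async ([src], { signal }) => (await fetch(src, { signal })).json(),
  });
}

// In non-interactive (render) mode the task is started explicitly:
//   await element.loader.run();
//   await element.loader.taskComplete;
```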
@@ -2,6 +2,7 @@ import { html } from "lit";
  import { createRef, ref } from "lit/directives/ref.js";
  import { property, customElement } from "lit/decorators.js";
  import { EFMedia } from "./EFMedia.mjs";
+ import { Task } from "@lit/task";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -18,6 +19,25 @@ let EFAudio = class extends EFMedia {
  super(...arguments);
  this.audioElementRef = createRef();
  this.src = "";
+ this.frameTask = new Task(this, {
+ args: () => [
+ this.trackFragmentIndexLoader.status,
+ this.initSegmentsLoader.status,
+ this.seekTask.status,
+ this.fetchSeekTask.status,
+ this.videoAssetTask.status
+ ],
+ task: async () => {
+ console.log("EFAudio frameTask", this.ownCurrentTimeMs);
+ await this.trackFragmentIndexLoader.taskComplete;
+ await this.initSegmentsLoader.taskComplete;
+ await this.seekTask.taskComplete;
+ await this.fetchSeekTask.taskComplete;
+ await this.videoAssetTask.taskComplete;
+ console.log("EFAudio frameTask complete", this.ownCurrentTimeMs);
+ this.rootTimegroup?.requestUpdate();
+ }
+ });
  }
  render() {
  return html`<audio ${ref(this.audioElementRef)}></audio>`;
@@ -7,6 +7,7 @@ import { EFTemporal } from "./EFTemporal.mjs";
  import { CrossUpdateController } from "./CrossUpdateController.mjs";
  import { FetchMixin } from "./FetchMixin.mjs";
  import { EFSourceMixin } from "./EFSourceMixin.mjs";
+ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -73,6 +74,7 @@ let EFCaptions = class extends EFSourceMixin(
  }
  });
  this.captionsDataTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.captionsPath(), this.fetch],
  task: async ([captionsPath, fetch2], { signal }) => {
  const response = await fetch2(captionsPath, { signal });
@@ -80,6 +82,7 @@ let EFCaptions = class extends EFSourceMixin(
  }
  });
  this.frameTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.captionsDataTask.status],
  task: async () => {
  await this.captionsDataTask.taskComplete;
@@ -4,6 +4,7 @@ import { customElement } from "lit/decorators.js";
  import { createRef, ref } from "lit/directives/ref.js";
  import { FetchMixin } from "./FetchMixin.mjs";
  import { EFSourceMixin } from "./EFSourceMixin.mjs";
+ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -21,14 +22,25 @@ let EFImage = class extends EFSourceMixin(FetchMixin(LitElement), {
  constructor() {
  super(...arguments);
  this.imageRef = createRef();
+ this.canvasRef = createRef();
  this.fetchImage = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.assetPath(), this.fetch],
  task: async ([assetPath, fetch], { signal }) => {
  const response = await fetch(assetPath, { signal });
- return URL.createObjectURL(await response.blob());
+ const image = new Image();
+ image.src = URL.createObjectURL(await response.blob());
+ await new Promise((resolve) => {
+ image.onload = resolve;
+ });
+ this.canvasRef.value.width = image.width;
+ this.canvasRef.value.height = image.height;
+ const ctx = this.canvasRef.value.getContext("2d");
+ ctx.drawImage(image, 0, 0);
  }
  });
  this.frameTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.fetchImage.status],
  task: async () => {
  await this.fetchImage.taskComplete;
@@ -36,7 +48,7 @@ let EFImage = class extends EFSourceMixin(FetchMixin(LitElement), {
  });
  }
  render() {
- return html`<img ${ref(this.imageRef)} src="${this.fetchImage.value}" />`;
+ return html`<canvas ${ref(this.canvasRef)}></canvas>`;
  }
  assetPath() {
  if (this.src.startsWith("http")) {
@@ -50,7 +62,7 @@ EFImage.styles = [
  :host {
  display: block;
  }
- img {
+ canvas {
  display: block;
  width: 100%;
  height: 100%;
@@ -3,13 +3,14 @@ import { EFTemporal } from "./EFTemporal.mjs";
  import { property, state } from "lit/decorators.js";
  import { deepArrayEquals } from "@lit/task/deep-equals.js";
  import { Task } from "@lit/task";
- import { MP4File } from "../../editor/util/MP4File.mjs";
+ import { MP4File } from "../../../editor/util/MP4File.mjs";
  import { getStartTimeMs } from "./util.mjs";
- import { VideoAsset } from "../../editor/util/EncodedAsset/EncodedAsset.mjs";
+ import { VideoAsset } from "../../../editor/util/EncodedAsset/EncodedAsset.mjs";
  import { FetchMixin } from "./FetchMixin.mjs";
  import { apiHostContext } from "../gui/EFWorkbench.mjs";
  import { consume } from "@lit/context";
  import { EFSourceMixin } from "./EFSourceMixin.mjs";
+ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -21,6 +22,16 @@ var __decorateClass = (decorators, target, key, kind) => {
  __defProp(target, key, result);
  return result;
  };
+ const deepGetMediaElements = (element, medias = []) => {
+ for (const child of element.children) {
+ if (child instanceof EFMedia) {
+ medias.push(child);
+ } else {
+ deepGetMediaElements(child, medias);
+ }
+ }
+ return medias;
+ };
  class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  assetType: "isobmff_files"
  }) {
@@ -30,6 +41,7 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  this.trackFragmentIndexLoader = new Task(this, {
  args: () => [this.fragmentIndexPath(), this.fetch],
  task: async ([fragmentIndexPath, fetch], { signal }) => {
+ console.log("EFMedia trackFragmentIndexLoader");
  const response = await fetch(fragmentIndexPath, { signal });
  return await response.json();
  },
@@ -39,8 +51,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  }
  });
  this.initSegmentsLoader = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.trackFragmentIndexLoader.value, this.src, this.fetch],
  task: async ([fragmentIndex, _src, fetch], { signal }) => {
+ console.log("EFMedia initSegmentsLoader");
  if (!fragmentIndex) {
  return;
  }
@@ -64,12 +78,14 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  }
  });
  this.seekTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [
  this.desiredSeekTimeMs,
  this.trackFragmentIndexLoader.value,
  this.initSegmentsLoader.value
  ],
  task: async ([seekToMs, fragmentIndex, initSegments], { signal }) => {
+ console.log("EFMedia seekTask");
  if (fragmentIndex === void 0) {
  return;
  }
@@ -94,9 +110,11 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  }
  });
  this.fetchSeekTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  argsEqual: deepArrayEquals,
  args: () => [this.initSegmentsLoader.value, this.seekTask.value, this.fetch],
  task: async ([initSegments, seekResult, fetch], { signal }) => {
+ console.log("EFMedia fetchSeekTask");
  if (!initSegments) {
  return;
  }
@@ -124,8 +142,10 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  }
  });
  this.videoAssetTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.fetchSeekTask.value],
  task: async ([files], { signal }) => {
+ console.log("EFMedia videoAssetTask");
  if (!files) {
  return;
  }
@@ -144,10 +164,12 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  }
  });
  this.desiredSeekTimeMs = 0;
- this.#audioContext = new OfflineAudioContext(1, 48e3 / 30, 48e3);
+ this.#audioContext = new OfflineAudioContext(2, 48e3 / 30, 48e3);
  this.audioBufferTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
  args: () => [this.fetchSeekTask.value, this.seekTask.value],
  task: async ([files, segments], { signal }) => {
+ console.log("EFMedia audioBufferTask", this.outerHTML);
  if (!files) {
  return;
  }
@@ -239,6 +261,60 @@ class EFMedia extends EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
  return getStartTimeMs(this);
  }
  #audioContext;
+ async fetchAudioSpanningTime(fromMs, toMs) {
+ fromMs -= this.startTimeMs;
+ toMs -= this.startTimeMs;
+ await this.trackFragmentIndexLoader.taskComplete;
+ const audioTrackId = this.defaultAudioTrackId;
+ if (!audioTrackId) {
+ console.warn("No audio track found");
+ return;
+ }
+ const audioTrackIndex = this.trackFragmentIndexLoader.value?.[audioTrackId];
+ if (!audioTrackIndex) {
+ console.warn("No audio track found");
+ return;
+ }
+ const start = audioTrackIndex.initSegment.offset;
+ const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size - 1;
+ const audioInitFragmentRequest = this.fetch(
+ this.fragmentTrackPath(String(audioTrackId)),
+ {
+ headers: { Range: `bytes=${start}-${end}` }
+ }
+ );
+ const fragments = Object.values(audioTrackIndex.segments).filter(
+ (segment) => {
+ const segmentStartsBeforeEnd = segment.dts <= toMs * audioTrackIndex.timescale / 1e3;
+ const segmentEndsAfterStart = segment.dts + segment.duration >= fromMs * audioTrackIndex.timescale / 1e3;
+ return segmentStartsBeforeEnd && segmentEndsAfterStart;
+ }
+ );
+ console.log("FRAGMENTS SPANNING TIME", JSON.stringify(fragments));
+ const firstFragment = fragments[0];
+ const lastFragment = fragments[fragments.length - 1];
+ const fragmentStart = firstFragment.offset;
+ const fragmentEnd = lastFragment.offset + lastFragment.size - 1;
+ console.log("FETCHING BYTES", `bytes=${fragmentStart}-${fragmentEnd}`);
+ const audioFragmentRequest = this.fetch(
+ this.fragmentTrackPath(String(audioTrackId)),
+ {
+ headers: { Range: `bytes=${fragmentStart}-${fragmentEnd}` }
+ }
+ );
+ const initResponse = await audioInitFragmentRequest;
+ const dataResponse = await audioFragmentRequest;
+ const initBuffer = await initResponse.arrayBuffer();
+ const dataBuffer = await dataResponse.arrayBuffer();
+ const audioBlob = new Blob([initBuffer, dataBuffer], {
+ type: "audio/mp4"
+ });
+ return {
+ blob: audioBlob,
+ startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3,
+ endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3
+ };
+ }
  }
  __decorateClass([
  property({ type: Number })
@@ -251,5 +327,6 @@ __decorateClass([
  state()
  ], EFMedia.prototype, "desiredSeekTimeMs", 2);
  export {
- EFMedia
+ EFMedia,
+ deepGetMediaElements
  };
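
`fetchAudioSpanningTime` above selects audio fragments by comparing millisecond bounds against `dts`/`duration` values expressed in the track's timescale (ticks per second). A small sketch of that overlap test under the same assumptions (helper name and sample values are illustrative):

```js
// A fragment overlaps [fromMs, toMs] if it starts before the window ends
// and ends after the window starts; dts and duration are in timescale ticks.
const overlapsWindow = (segment, fromMs, toMs, timescale) => {
  const fromTicks = fromMs * timescale / 1e3;
  const toTicks = toMs * timescale / 1e3;
  return segment.dts <= toTicks && segment.dts + segment.duration >= fromTicks;
};

// Example: with a 48000 timescale, a fragment at dts 96000 with duration 48000
// covers 2000ms-3000ms, so it overlaps a 2500ms-4000ms window.
console.log(overlapsWindow({ dts: 96000, duration: 48000 }, 2500, 4000, 48000)); // true
```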
@@ -3,6 +3,7 @@ import { property, state } from "lit/decorators.js";
  import { EFTimegroup } from "./EFTimegroup.mjs";
  import { durationConverter } from "./durationConverter.mjs";
  import { Task } from "@lit/task";
+ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -28,6 +29,15 @@ const deepGetTemporalElements = (element, temporals = []) => {
  }
  return temporals;
  };
+ const deepGetElementsWithFrameTasks = (element, elements = []) => {
+ for (const child of element.children) {
+ if ("frameTask" in child && child.frameTask instanceof Task) {
+ elements.push(child);
+ }
+ deepGetElementsWithFrameTasks(child, elements);
+ }
+ return elements;
+ };
  const shallowGetTemporalElements = (element, temporals = []) => {
  for (const child of element.children) {
  if (isEFTemporal(child)) {
@@ -68,9 +78,13 @@ const EFTemporal = (superClass) => {
  this._offsetMs = 0;
  this.rootTimegroup = this.getRootTimegroup();
  this.frameTask = new Task(this, {
- autoRun: false,
+ autoRun: EF_INTERACTIVE,
  args: () => [this.ownCurrentTimeMs],
  task: async ([], { signal }) => {
+ let fullyUpdated = await this.updateComplete;
+ while (!fullyUpdated) {
+ fullyUpdated = await this.updateComplete;
+ }
  }
  });
  }
@@ -111,6 +125,19 @@ const EFTemporal = (superClass) => {
  get offsetMs() {
  return this._offsetMs || 0;
  }
+ get parentTemporal() {
+ let parent = this.parentElement;
+ while (parent && !isEFTemporal(parent)) {
+ parent = parent.parentElement;
+ }
+ return parent;
+ }
+ get startTimeWithinParentMs() {
+ if (!this.parentTemporal) {
+ return 0;
+ }
+ return this.startTimeMs - this.parentTemporal.startTimeMs;
+ }
  get startTimeMs() {
  const parentTimegroup = this.parentTimegroup;
  if (!parentTimegroup) {
@@ -178,6 +205,7 @@ const EFTemporal = (superClass) => {
  export {
  EFTemporal,
  OwnCurrentTimeController,
+ deepGetElementsWithFrameTasks,
  deepGetTemporalElements,
  isEFTemporal,
  shallowGetTemporalElements,
@@ -3,6 +3,9 @@ import { provide } from "@lit/context";
  import { property, customElement } from "lit/decorators.js";
  import { EFTemporal, shallowGetTemporalElements, isEFTemporal, timegroupContext } from "./EFTemporal.mjs";
  import { TimegroupController } from "./TimegroupController.mjs";
+ import { EF_INTERACTIVE } from "../EF_INTERACTIVE.mjs";
+ import { deepGetMediaElements } from "./EFMedia.mjs";
+ import { Task } from "@lit/task";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -40,6 +43,16 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
  __privateAdd(this, _currentTime, 0);
  this.mode = "sequence";
  this.crossoverMs = 0;
+ this.frameTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
+ args: () => [this.ownCurrentTimeMs, this.currentTimeMs],
+ task: async ([], { signal }) => {
+ let fullyUpdated = await this.updateComplete;
+ while (!fullyUpdated) {
+ fullyUpdated = await this.updateComplete;
+ }
+ }
+ });
  }
  set currentTime(time) {
  __privateSet(this, _currentTime, Math.max(0, Math.min(time, this.durationMs / 1e3)));
@@ -83,6 +96,9 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
  if (this.parentTimegroup) {
  new TimegroupController(this.parentTimegroup, this);
  }
+ if (this.shouldWrapWithWorkbench()) {
+ this.wrapWithWorkbench();
+ }
  }
  get storageKey() {
  if (!this.id) {
@@ -126,12 +142,20 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
  throw new Error(`Invalid time mode: ${this.mode}`);
  }
  }
+ async waitForMediaDurations() {
+ return await Promise.all(
+ deepGetMediaElements(this).map(
+ (media) => media.trackFragmentIndexLoader.taskComplete
+ )
+ );
+ }
  get childTemporals() {
  return shallowGetTemporalElements(this);
  }
  updated(changedProperties) {
  super.updated(changedProperties);
  if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
+ console.log("Updating animations to time", this.ownCurrentTimeMs);
  const animations = this.getAnimations({ subtree: true });
  this.style.setProperty(
  "--ef-duration",
@@ -176,9 +200,109 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
  });
  }
  }
+ shouldWrapWithWorkbench() {
+ return EF_INTERACTIVE && this.closest("ef-timegroup") === this && this.closest("ef-workbench") === null;
+ }
+ wrapWithWorkbench() {
+ const workbench = document.createElement("ef-workbench");
+ document.body.append(workbench);
+ if (!this.hasAttribute("id")) {
+ this.setAttribute("id", "root-this");
+ }
+ this.setAttribute("slot", "canvas");
+ workbench.append(this);
+ const filmstrip = document.createElement("ef-filmstrip");
+ filmstrip.setAttribute("slot", "timeline");
+ filmstrip.setAttribute("target", `#${this.id}`);
+ workbench.append(filmstrip);
+ }
  get hasOwnDuration() {
  return true;
  }
+ get efElements() {
+ return Array.from(
+ this.querySelectorAll(
+ "ef-audio, ef-video, ef-image, ef-captions, ef-waveform"
+ )
+ );
+ }
+ async renderAudio(fromMs, toMs) {
+ await this.waitForMediaDurations();
+ const durationMs = toMs - fromMs;
+ const audioContext = new OfflineAudioContext(
+ 2,
+ Math.round(48e3 * durationMs / 1e3),
+ 48e3
+ );
+ console.log("RENDERING AUDIO");
+ console.log(
+ `renderAudio fromMs=${fromMs} toMs=${toMs} durationMs=${durationMs} ctxSize=${audioContext.length}`
+ );
+ await Promise.all(
+ deepGetMediaElements(this).map(async (mediaElement) => {
+ await mediaElement.trackFragmentIndexLoader.taskComplete;
+ const mediaStartsBeforeEnd = mediaElement.startTimeMs <= toMs;
+ const mediaEndsAfterStart = mediaElement.endTimeMs >= fromMs;
+ const mediaOverlaps = mediaStartsBeforeEnd && mediaEndsAfterStart;
+ if (!mediaOverlaps || mediaElement.defaultAudioTrackId === void 0) {
+ console.log("Skipping audio element due to lack of overlap");
+ return;
+ }
+ const audio = await mediaElement.fetchAudioSpanningTime(fromMs, toMs);
+ if (!audio) {
+ throw new Error("Failed to fetch audio");
+ }
+ const ctxStartMs = Math.max(0, mediaElement.startTimeMs - fromMs);
+ const ctxEndMs = Math.min(durationMs, mediaElement.endTimeMs - fromMs);
+ const ctxDurationMs = ctxEndMs - ctxStartMs;
+ const offset = Math.max(0, fromMs - mediaElement.startTimeMs) - audio.startMs;
+ console.log(
+ "AUDIO SPAN",
+ JSON.stringify({
+ fromMs,
+ toMs,
+ audio: {
+ startMs: audio.startMs,
+ endMs: audio.endMs
+ },
+ elementStart: mediaElement.startTimeMs,
+ elementEnd: mediaElement.endTimeMs,
+ ctxStart: ctxStartMs,
+ ctxEnd: ctxEndMs,
+ offset
+ })
+ );
+ const bufferSource = audioContext.createBufferSource();
+ bufferSource.buffer = await audioContext.decodeAudioData(
+ await audio.blob.arrayBuffer()
+ );
+ bufferSource.connect(audioContext.destination);
+ bufferSource.start(
+ ctxStartMs / 1e3,
+ offset / 1e3,
+ ctxDurationMs / 1e3
+ );
+ })
+ );
+ return await audioContext.startRendering();
+ }
+ async loadMd5Sums() {
+ const efElements = this.efElements;
+ const loaderTasks = [];
+ efElements.forEach((el) => {
+ const md5SumLoader = el.md5SumLoader;
+ if (md5SumLoader instanceof Task) {
+ md5SumLoader.run();
+ loaderTasks.push(md5SumLoader.taskComplete);
+ }
+ });
+ await Promise.all(loaderTasks);
+ efElements.map((el) => {
+ if ("productionSrc" in el && el.productionSrc instanceof Function) {
+ el.setAttribute("src", el.productionSrc());
+ }
+ });
+ }
  };
  _currentTime = /* @__PURE__ */ new WeakMap();
  EFTimegroup.styles = css`
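
`renderAudio` above mixes every overlapping media element into one `OfflineAudioContext` by scheduling a buffer source with `start(when, offset, duration)`, with all three values converted from milliseconds to seconds. A self-contained sketch of that scheduling pattern (the clip here is a placeholder buffer rather than the fetched MP4 audio, and top-level await assumes an ES module context):

```js
// Render a 1.5 s stereo window at 48 kHz, the sample rate used above.
const ctx = new OfflineAudioContext(2, Math.round(48e3 * 1.5), 48e3);
// Stands in for the result of decodeAudioData on the fetched audio blob.
const decodedClip = ctx.createBuffer(2, 48e3 * 2, 48e3);
const source = ctx.createBufferSource();
source.buffer = decodedClip;
source.connect(ctx.destination);
// when / offset / duration are in seconds: place the clip 250 ms into the
// window, skip its first 100 ms, and play one second of it.
source.start(0.25, 0.1, 1.0);
const rendered = await ctx.startRendering();
console.log(rendered.length); // 72000 samples = 1.5 s at 48 kHz
```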
@@ -49,12 +49,14 @@ let EFVideo = class extends TWMixin(EFMedia) {
  this.paintTask.status
  ],
  task: async () => {
+ console.log("EFVideo frameTask", this.ownCurrentTimeMs);
  await this.trackFragmentIndexLoader.taskComplete;
  await this.initSegmentsLoader.taskComplete;
  await this.seekTask.taskComplete;
  await this.fetchSeekTask.taskComplete;
  await this.videoAssetTask.taskComplete;
  await this.paintTask.taskComplete;
+ console.log("EFVideo frameTask complete", this.ownCurrentTimeMs);
  this.rootTimegroup?.requestUpdate();
  }
  });
@@ -63,6 +65,7 @@ let EFVideo = class extends TWMixin(EFMedia) {
  task: async ([videoAsset, seekToMs], {
  signal: _signal
  }) => {
+ console.log(`EFVideo paintTask decoderLock=${__privateGet(this, _decoderLock)}`);
  if (!videoAsset) {
  return;
  }
@@ -72,6 +75,13 @@ let EFVideo = class extends TWMixin(EFMedia) {
  try {
  __privateSet(this, _decoderLock, true);
  const frame = await videoAsset.seekToTime(seekToMs / 1e3);
+ console.log(
+ "Painting frame",
+ "seekToMs",
+ seekToMs,
+ "timestamp",
+ frame?.timestamp
+ );
  if (!this.canvasElement) {
  return;
  }