@editframe/elements 0.11.0-beta.9 → 0.12.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/dist/EF_FRAMEGEN.d.ts +8 -15
  2. package/dist/assets/src/MP4File.js +73 -20
  3. package/dist/elements/EFCaptions.d.ts +50 -6
  4. package/dist/elements/EFMedia.d.ts +1 -2
  5. package/dist/elements/EFTimegroup.browsertest.d.ts +4 -0
  6. package/dist/elements/EFTimegroup.d.ts +23 -2
  7. package/dist/elements/EFWaveform.d.ts +15 -11
  8. package/dist/elements/src/EF_FRAMEGEN.js +24 -26
  9. package/dist/elements/src/elements/EFCaptions.js +295 -42
  10. package/dist/elements/src/elements/EFImage.js +0 -6
  11. package/dist/elements/src/elements/EFMedia.js +70 -18
  12. package/dist/elements/src/elements/EFTemporal.js +13 -10
  13. package/dist/elements/src/elements/EFTimegroup.js +37 -12
  14. package/dist/elements/src/elements/EFVideo.js +1 -4
  15. package/dist/elements/src/elements/EFWaveform.js +250 -143
  16. package/dist/elements/src/gui/ContextMixin.js +44 -11
  17. package/dist/elements/src/gui/EFPreview.js +3 -1
  18. package/dist/elements/src/gui/EFScrubber.js +142 -0
  19. package/dist/elements/src/gui/EFTimeDisplay.js +81 -0
  20. package/dist/elements/src/gui/EFTogglePlay.js +11 -19
  21. package/dist/elements/src/gui/EFWorkbench.js +1 -24
  22. package/dist/elements/src/gui/TWMixin.css.js +1 -1
  23. package/dist/elements/src/index.js +8 -1
  24. package/dist/gui/ContextMixin.d.ts +2 -1
  25. package/dist/gui/EFScrubber.d.ts +23 -0
  26. package/dist/gui/EFTimeDisplay.d.ts +17 -0
  27. package/dist/gui/EFTogglePlay.d.ts +0 -2
  28. package/dist/gui/EFWorkbench.d.ts +0 -1
  29. package/dist/index.d.ts +3 -1
  30. package/dist/style.css +6 -801
  31. package/package.json +2 -2
  32. package/src/elements/EFCaptions.browsertest.ts +6 -6
  33. package/src/elements/EFCaptions.ts +325 -56
  34. package/src/elements/EFImage.browsertest.ts +4 -17
  35. package/src/elements/EFImage.ts +0 -6
  36. package/src/elements/EFMedia.browsertest.ts +10 -19
  37. package/src/elements/EFMedia.ts +87 -20
  38. package/src/elements/EFTemporal.browsertest.ts +14 -0
  39. package/src/elements/EFTemporal.ts +14 -0
  40. package/src/elements/EFTimegroup.browsertest.ts +37 -0
  41. package/src/elements/EFTimegroup.ts +42 -17
  42. package/src/elements/EFVideo.ts +1 -4
  43. package/src/elements/EFWaveform.ts +339 -314
  44. package/src/gui/ContextMixin.browsertest.ts +28 -2
  45. package/src/gui/ContextMixin.ts +52 -14
  46. package/src/gui/EFPreview.ts +4 -2
  47. package/src/gui/EFScrubber.ts +145 -0
  48. package/src/gui/EFTimeDisplay.ts +81 -0
  49. package/src/gui/EFTogglePlay.ts +19 -25
  50. package/src/gui/EFWorkbench.ts +3 -36
  51. package/dist/elements/src/elements/util.js +0 -11
@@ -3,12 +3,12 @@ declare global {
3
3
  interface Window {
4
4
  EF_FRAMEGEN?: EfFramegen;
5
5
  FRAMEGEN_BRIDGE?: {
6
- onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
7
- initialized(renderId: string): void;
8
- onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
6
+ onInitialize: (callback: (renderOptions: VideoRenderOptions) => void) => void;
7
+ initialized(): void;
8
+ onBeginFrame(callback: (frameNumber: number, isLast: boolean) => void): void;
9
9
  onTriggerCanvas(callback: () => void): void;
10
- frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
11
- error(renderId: string, error: Error): void;
10
+ frameReady(frameNumber: number, audioSamples: ArrayBuffer): void;
11
+ error(error: Error): void;
12
12
  };
13
13
  }
14
14
  }
@@ -25,19 +25,12 @@ export declare class EfFramegen {
25
25
  audioBufferPromise?: Promise<AudioBuffer>;
26
26
  renderOptions?: VideoRenderOptions;
27
27
  frameBox: HTMLDivElement;
28
- BRIDGE: {
29
- onInitialize: (callback: (renderId: string, renderOptions: VideoRenderOptions) => void) => void;
30
- initialized(renderId: string): void;
31
- onBeginFrame(callback: (renderId: string, frameNumber: number, isLast: boolean) => void): void;
32
- onTriggerCanvas(callback: () => void): void;
33
- frameReady(renderId: string, frameNumber: number, audioSamples: ArrayBuffer): void;
34
- error(renderId: string, error: Error): void;
35
- } | undefined;
28
+ BRIDGE: typeof window.FRAMEGEN_BRIDGE;
36
29
  triggerCanvas: TriggerCanvas;
37
30
  trace(...args: any[]): void;
38
31
  constructor();
39
32
  connectToBridge(): void;
40
- initialize(renderId: string, renderOptions: VideoRenderOptions): Promise<void>;
41
- beginFrame(renderId: string, frameNumber: number, isLast: boolean): Promise<string | ArrayBuffer | null | undefined>;
33
+ initialize(renderOptions: VideoRenderOptions): Promise<void>;
34
+ beginFrame(frameNumber: number, isLast: boolean): Promise<string | ArrayBuffer | null | undefined>;
42
35
  }
43
36
  export {};
@@ -40,13 +40,13 @@ class MP4File extends MP4Box.ISOFile {
40
40
  await this.readyPromise;
41
41
  const trackInfo = {};
42
42
  for (const videoTrack of this.getInfo().videoTracks) {
43
- trackInfo[videoTrack.id] = { index: 0, complete: false };
43
+ trackInfo[videoTrack.id] = { index: 0 };
44
44
  this.setSegmentOptions(videoTrack.id, null, {
45
45
  rapAlignement: true
46
46
  });
47
47
  }
48
48
  for (const audioTrack of this.getInfo().audioTracks) {
49
- trackInfo[audioTrack.id] = { index: 0, complete: false };
49
+ trackInfo[audioTrack.id] = { index: 0 };
50
50
  const sampleRate = audioTrack.audio.sample_rate;
51
51
  const probablePacketSize = 1024;
52
52
  const probableFourSecondsOfSamples = Math.ceil(
@@ -61,21 +61,12 @@ class MP4File extends MP4Box.ISOFile {
61
61
  yield {
62
62
  track: initSegment.id,
63
63
  segment: "init",
64
- data: initSegment.buffer,
65
- complete: false
64
+ data: initSegment.buffer
66
65
  };
67
66
  }
68
67
  const fragmentStartSamples = {};
69
68
  let finishedReading = false;
70
- const allTracksFinished = () => {
71
- for (const fragmentedTrack of this.fragmentedTracks) {
72
- if (!trackInfo[fragmentedTrack.id]?.complete) {
73
- return false;
74
- }
75
- }
76
- return true;
77
- };
78
- while (!(finishedReading && allTracksFinished())) {
69
+ do {
79
70
  for (const fragTrak of this.fragmentedTracks) {
80
71
  const trak = fragTrak.trak;
81
72
  if (trak.nextSample === void 0) {
@@ -84,6 +75,8 @@ class MP4File extends MP4Box.ISOFile {
84
75
  if (trak.samples === void 0) {
85
76
  throw new Error("trak.samples is undefined");
86
77
  }
78
+ log("trak.nextSample", fragTrak.id, trak.nextSample);
79
+ log("trak.samples.length", fragTrak.id, trak.samples.length);
87
80
  while (trak.nextSample < trak.samples.length) {
88
81
  let result = void 0;
89
82
  const fragTrakNextSample = trak.samples[trak.nextSample];
@@ -120,22 +113,20 @@ class MP4File extends MP4Box.ISOFile {
120
113
  if (!trackInfoForFrag) {
121
114
  throw new Error("trackInfoForFrag is undefined");
122
115
  }
123
- if (trak.nextSample >= trak.samples.length) {
124
- trackInfoForFrag.complete = true;
125
- }
126
- log(
127
- `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`
128
- );
129
116
  const startSample = fragmentStartSamples[fragTrak.id];
130
117
  const endSample = trak.samples[trak.nextSample - 1];
131
118
  if (!startSample || !endSample) {
132
119
  throw new Error("startSample or endSample is undefined");
133
120
  }
121
+ log(
122
+ `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`,
123
+ `startTime=${startSample.cts}`,
124
+ `endTime=${endSample.cts + endSample.duration}`
125
+ );
134
126
  yield {
135
127
  track: fragTrak.id,
136
128
  segment: trackInfoForFrag.index,
137
129
  data: fragTrak.segmentStream.buffer,
138
- complete: trackInfoForFrag.complete,
139
130
  cts: startSample.cts,
140
131
  dts: startSample.dts,
141
132
  duration: endSample.cts - startSample.cts + endSample.duration
@@ -147,6 +138,68 @@ class MP4File extends MP4Box.ISOFile {
147
138
  }
148
139
  }
149
140
  finishedReading = await this.waitForMoreSamples();
141
+ } while (!finishedReading);
142
+ for (const fragTrak of this.fragmentedTracks) {
143
+ const trak = fragTrak.trak;
144
+ if (trak.nextSample === void 0) {
145
+ throw new Error("trak.nextSample is undefined");
146
+ }
147
+ if (trak.samples === void 0) {
148
+ throw new Error("trak.samples is undefined");
149
+ }
150
+ while (trak.nextSample < trak.samples.length) {
151
+ let result = void 0;
152
+ try {
153
+ result = this.createFragment(
154
+ fragTrak.id,
155
+ trak.nextSample,
156
+ fragTrak.segmentStream
157
+ );
158
+ } catch (error) {
159
+ console.error("Failed to createFragment", error);
160
+ }
161
+ if (result) {
162
+ fragTrak.segmentStream = result;
163
+ trak.nextSample++;
164
+ } else {
165
+ finishedReading = await this.waitForMoreSamples();
166
+ break;
167
+ }
168
+ const nextSample = trak.samples[trak.nextSample];
169
+ const emitSegment = (
170
+ // if rapAlignement is true, we emit a fragment when we have a rap sample coming up next
171
+ fragTrak.rapAlignement === true && nextSample?.is_sync || // if rapAlignement is false, we emit a fragment when we have the required number of samples
172
+ !fragTrak.rapAlignement && trak.nextSample % fragTrak.nb_samples === 0 || // if we have more samples than the number of samples requested, we emit the fragment
173
+ trak.nextSample >= trak.samples.length
174
+ );
175
+ if (emitSegment) {
176
+ const trackInfoForFrag = trackInfo[fragTrak.id];
177
+ if (!trackInfoForFrag) {
178
+ throw new Error("trackInfoForFrag is undefined");
179
+ }
180
+ const startSample = fragmentStartSamples[fragTrak.id];
181
+ const endSample = trak.samples[trak.nextSample - 1];
182
+ if (!startSample || !endSample) {
183
+ throw new Error("startSample or endSample is undefined");
184
+ }
185
+ log(
186
+ `Yielding fragment #${trackInfoForFrag.index} for track=${fragTrak.id}`,
187
+ `startTime=${startSample.cts}`,
188
+ `endTime=${endSample.cts + endSample.duration}`
189
+ );
190
+ yield {
191
+ track: fragTrak.id,
192
+ segment: trackInfoForFrag.index,
193
+ data: fragTrak.segmentStream.buffer,
194
+ cts: startSample.cts,
195
+ dts: startSample.dts,
196
+ duration: endSample.cts - startSample.cts + endSample.duration
197
+ };
198
+ trackInfoForFrag.index += 1;
199
+ fragTrak.segmentStream = null;
200
+ delete fragmentStartSamples[fragTrak.id];
201
+ }
202
+ }
150
203
  }
151
204
  }
152
205
  waitForMoreSamples() {
@@ -1,38 +1,82 @@
1
- import { LitElement, PropertyValueMap } from 'lit';
2
1
  import { Task } from '@lit/task';
3
- import { EFVideo } from './EFVideo.ts';
2
+ import { LitElement, PropertyValueMap } from 'lit';
4
3
  import { EFAudio } from './EFAudio.ts';
4
+ import { EFVideo } from './EFVideo.ts';
5
5
  declare const EFCaptionsActiveWord_base: (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & typeof LitElement;
6
6
  export declare class EFCaptionsActiveWord extends EFCaptionsActiveWord_base {
7
7
  static styles: import('lit').CSSResult[];
8
- render(): import('lit-html').TemplateResult<1>;
8
+ render(): import('lit-html').TemplateResult<1> | undefined;
9
9
  wordStartMs: number;
10
10
  wordEndMs: number;
11
11
  wordText: string;
12
+ hidden: boolean;
13
+ get startTimeMs(): number;
14
+ get durationMs(): number;
15
+ }
16
+ declare const EFCaptionsSegment_base: (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & typeof LitElement;
17
+ export declare class EFCaptionsSegment extends EFCaptionsSegment_base {
18
+ static styles: import('lit').CSSResult[];
19
+ render(): import('lit-html').TemplateResult<1> | undefined;
20
+ segmentStartMs: number;
21
+ segmentEndMs: number;
22
+ segmentText: string;
23
+ hidden: boolean;
24
+ get startTimeMs(): number;
25
+ get durationMs(): number;
26
+ }
27
+ export declare class EFCaptionsBeforeActiveWord extends EFCaptionsSegment {
28
+ static styles: import('lit').CSSResult[];
29
+ render(): import('lit-html').TemplateResult<1> | undefined;
30
+ hidden: boolean;
31
+ segmentText: string;
32
+ segmentStartMs: number;
33
+ segmentEndMs: number;
34
+ get startTimeMs(): number;
35
+ get durationMs(): number;
36
+ }
37
+ export declare class EFCaptionsAfterActiveWord extends EFCaptionsSegment {
38
+ static styles: import('lit').CSSResult[];
39
+ render(): import('lit-html').TemplateResult<1> | undefined;
40
+ hidden: boolean;
41
+ segmentText: string;
42
+ segmentStartMs: number;
43
+ segmentEndMs: number;
12
44
  get startTimeMs(): number;
13
45
  get durationMs(): number;
14
46
  }
15
47
  declare const EFCaptions_base: (new (...args: any[]) => import('./EFSourceMixin.ts').EFSourceMixinInterface) & (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & (new (...args: any[]) => import('./FetchMixin.ts').FetchMixinInterface) & typeof LitElement;
16
48
  export declare class EFCaptions extends EFCaptions_base {
17
49
  static styles: import('lit').CSSResult[];
50
+ displayMode: "word" | "segment" | "context";
51
+ contextWords: number;
18
52
  targetSelector: string;
19
53
  set target(value: string);
20
54
  wordStyle: string;
21
55
  activeWordContainers: HTMLCollectionOf<EFCaptionsActiveWord>;
56
+ segmentContainers: HTMLCollectionOf<EFCaptionsSegment>;
57
+ beforeActiveWordContainers: HTMLCollectionOf<EFCaptionsBeforeActiveWord>;
58
+ afterActiveWordContainers: HTMLCollectionOf<EFCaptionsAfterActiveWord>;
59
+ render(): import('lit-html').TemplateResult<1>;
60
+ transcriptionsPath(): string | null;
22
61
  captionsPath(): string;
23
62
  protected md5SumLoader: Task<readonly [string, typeof fetch], string | undefined>;
24
- private captionsDataTask;
63
+ private transcriptionDataTask;
64
+ private transcriptionFragmentPath;
65
+ private fragmentIndexTask;
66
+ private transcriptionFragmentDataTask;
25
67
  frameTask: Task<import('@lit/task').TaskStatus[], void>;
26
68
  connectedCallback(): void;
27
- render(): import('lit-html').TemplateResult<1> | undefined;
28
69
  protected updated(_changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>): void;
29
- updateActiveWord(): void;
70
+ updateTextContainers(): void;
30
71
  get targetElement(): EFAudio | EFVideo;
31
72
  }
32
73
  declare global {
33
74
  interface HTMLElementTagNameMap {
34
75
  "ef-captions": EFCaptions;
35
76
  "ef-captions-active-word": EFCaptionsActiveWord;
77
+ "ef-captions-segment": EFCaptionsSegment;
78
+ "ef-captions-before-active-word": EFCaptionsBeforeActiveWord;
79
+ "ef-captions-after-active-word": EFCaptionsAfterActiveWord;
36
80
  }
37
81
  }
38
82
  export {};
@@ -16,7 +16,7 @@ export declare class EFMedia extends EFMedia_base {
16
16
  fragmentIndexPath(): string;
17
17
  fragmentTrackPath(trackId: string): string;
18
18
  trackFragmentIndexLoader: Task<readonly [string, typeof fetch], Record<number, TrackFragmentIndex>>;
19
- protected initSegmentsLoader: Task<readonly [Record<number, TrackFragmentIndex> | undefined, string, typeof fetch], {
19
+ initSegmentsLoader: Task<readonly [Record<number, TrackFragmentIndex> | undefined, string, typeof fetch], {
20
20
  trackId: string;
21
21
  buffer: MP4Box.MP4ArrayBuffer;
22
22
  mp4File: MP4File;
@@ -47,7 +47,6 @@ export declare class EFMedia extends EFMedia_base {
47
47
  protected updated(changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>): void;
48
48
  get hasOwnDuration(): boolean;
49
49
  get durationMs(): number;
50
- get startTimeMs(): number;
51
50
  audioBufferTask: Task<readonly [Record<string, File> | undefined, Record<string, {
52
51
  segment: TrackSegment;
53
52
  track: MP4Box.TrackInfo;
@@ -1,4 +1,7 @@
1
1
  import { LitElement } from 'lit';
2
+ declare const TestContext_base: (new (...args: any[]) => import('../gui/ContextMixin.ts').ContextMixinInterface) & typeof LitElement;
3
+ declare class TestContext extends TestContext_base {
4
+ }
2
5
  declare const TestTemporal_base: (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & typeof LitElement;
3
6
  declare class TestTemporal extends TestTemporal_base {
4
7
  get hasOwnDuration(): boolean;
@@ -6,6 +9,7 @@ declare class TestTemporal extends TestTemporal_base {
6
9
  declare global {
7
10
  interface HTMLElementTagNameMap {
8
11
  "test-temporal": TestTemporal;
12
+ "test-context": TestContext;
9
13
  }
10
14
  }
11
15
  export {};
@@ -1,5 +1,5 @@
1
- import { LitElement, PropertyValueMap } from 'lit';
2
1
  import { Task } from '@lit/task';
2
+ import { LitElement, PropertyValueMap } from 'lit';
3
3
  export declare const shallowGetTimegroups: (element: Element, groups?: EFTimegroup[]) => EFTimegroup[];
4
4
  declare const EFTimegroup_base: (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & typeof LitElement;
5
5
  export declare class EFTimegroup extends EFTimegroup_base {
@@ -17,9 +17,30 @@ export declare class EFTimegroup extends EFTimegroup_base {
17
17
  connectedCallback(): void;
18
18
  get storageKey(): string;
19
19
  get durationMs(): number;
20
- waitForMediaDurations(): Promise<Record<number, import('../../../assets/src/index.ts').TrackFragmentIndex>[]>;
20
+ /**
21
+ * Wait for all media elements to load their initial segments.
22
+ * Ideally we would only need the extracted index json data, but
23
+ * that caused issues with constructing audio data. We had negative durations
24
+ * in calculations and it was not clear why.
25
+ */
26
+ waitForMediaDurations(): Promise<({
27
+ trackId: string;
28
+ buffer: import('mp4box').MP4ArrayBuffer;
29
+ mp4File: import('../../../assets/src/MP4File.ts').MP4File;
30
+ }[] | undefined)[]>;
21
31
  get childTemporals(): import('./EFTemporal.ts').TemporalMixinInterface[];
22
32
  protected updated(changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>): void;
33
+ get contextProvider(): import('../gui/ContextMixin.ts').ContextMixinInterface | null;
34
+ /**
35
+ * Returns true if the timegroup should be wrapped with a workbench.
36
+ *
37
+ * A timegroup should be wrapped with a workbench if it is the root-most timegroup
38
+ * and EF_INTERACTIVE is true.
39
+ *
40
+ * If the timegroup is already wrapped in a context provider like ef-preview,
41
+ * it should NOT be wrapped in a workbench.
42
+ *
43
+ */
23
44
  shouldWrapWithWorkbench(): boolean;
24
45
  wrapWithWorkbench(): void;
25
46
  get hasOwnDuration(): boolean;
@@ -1,12 +1,13 @@
1
1
  import { EFAudio } from './EFAudio.ts';
2
2
  import { Task } from '@lit/task';
3
- import { LitElement } from 'lit';
3
+ import { LitElement, PropertyValueMap } from 'lit';
4
4
  import { Ref } from 'lit/directives/ref.js';
5
5
  import { EFVideo } from './EFVideo.ts';
6
6
  declare const EFWaveform_base: (new (...args: any[]) => import('./EFTemporal.ts').TemporalMixinInterface) & typeof LitElement;
7
7
  export declare class EFWaveform extends EFWaveform_base {
8
8
  static styles: import('lit').CSSResult[];
9
- svgRef: Ref<SVGElement>;
9
+ canvasRef: Ref<HTMLCanvasElement>;
10
+ private ctx;
10
11
  createRenderRoot(): this;
11
12
  render(): import('lit-html').TemplateResult<1>;
12
13
  mode: "roundBars" | "bars" | "bricks" | "equalizer" | "curve" | "line" | "pixel" | "wave";
@@ -14,16 +15,19 @@ export declare class EFWaveform extends EFWaveform_base {
14
15
  targetSelector: string;
15
16
  set target(value: string);
16
17
  connectedCallback(): void;
17
- protected drawBars(svg: SVGElement, frequencyData: Uint8Array): void;
18
- protected drawBricks(svg: SVGElement, frequencyData: Uint8Array): void;
19
- protected drawLine(svg: SVGElement, frequencyData: Uint8Array): void;
20
- protected drawRoundBars(svg: SVGElement, frequencyData: Uint8Array): void;
21
- protected drawEqualizer(svg: SVGElement, frequencyData: Uint8Array): void;
22
- protected drawCurve(svg: SVGElement, frequencyData: Uint8Array): void;
23
- protected drawPixel(svg: SVGElement, frequencyData: Uint8Array): void;
24
- protected drawWave(svg: SVGElement, frequencyData: Uint8Array): void;
18
+ protected initCanvas(): CanvasRenderingContext2D | null;
19
+ protected drawBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
20
+ protected drawBricks(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
21
+ protected drawLine(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
22
+ protected drawRoundBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
23
+ protected drawEqualizer(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
24
+ protected drawCurve(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
25
+ protected drawPixel(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
26
+ protected drawWave(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
27
+ private lastFrameTime;
25
28
  frameTask: Task<readonly [import('@lit/task').TaskStatus], void>;
26
- protected updated(): Promise<void>;
29
+ get durationMs(): number;
27
30
  get targetElement(): EFAudio | EFVideo;
31
+ protected updated(changedProperties: PropertyValueMap<this>): void;
28
32
  }
29
33
  export {};
@@ -1,8 +1,6 @@
1
- import debug from "debug";
2
1
  import { TaskStatus } from "@lit/task";
3
2
  import { deepGetElementsWithFrameTasks } from "./elements/EFTemporal.js";
4
3
  import { shallowGetTimegroups } from "./elements/EFTimegroup.js";
5
- const log = debug("ef:elements:EF_FRAMEGEN");
6
4
  class TriggerCanvas {
7
5
  constructor() {
8
6
  this.canvas = document.createElement("canvas");
@@ -23,7 +21,7 @@ class TriggerCanvas {
23
21
  this.ctx.fillStyle = "black";
24
22
  }
25
23
  trigger() {
26
- log("TRIGGERING CANVAS");
24
+ console.log("TRIGGERING CANVAS");
27
25
  this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
28
26
  }
29
27
  }
@@ -33,9 +31,10 @@ class EfFramegen {
33
31
  this.frameDurationMs = 0;
34
32
  this.initialBusyTasks = Promise.resolve([]);
35
33
  this.frameBox = document.createElement("div");
36
- this.BRIDGE = window.FRAMEGEN_BRIDGE;
37
34
  this.triggerCanvas = new TriggerCanvas();
35
+ this.BRIDGE = window.FRAMEGEN_BRIDGE;
38
36
  if (this.BRIDGE) {
37
+ console.log("BRIDGE.constructor (connecting to bridge)");
39
38
  this.connectToBridge();
40
39
  }
41
40
  }
@@ -47,27 +46,28 @@ class EfFramegen {
47
46
  if (!BRIDGE) {
48
47
  throw new Error("No BRIDGE when attempting to connect to bridge");
49
48
  }
50
- BRIDGE.onInitialize(async (renderId, renderOptions) => {
51
- log("BRIDGE.onInitialize", renderId, renderOptions);
52
- await this.initialize(renderId, renderOptions);
53
- BRIDGE.initialized(renderId);
49
+ console.log("BRIDGE.connectToBridge");
50
+ BRIDGE.onInitialize(async (renderOptions) => {
51
+ console.log("BRIDGE.onInitialize", renderOptions);
52
+ await this.initialize(renderOptions);
53
+ BRIDGE.initialized();
54
54
  });
55
- BRIDGE.onBeginFrame((renderId, frameNumber, isLast) => {
56
- log("BRIDGE.onBeginFrame", renderId, frameNumber, isLast);
57
- this.beginFrame(renderId, frameNumber, isLast);
55
+ BRIDGE.onBeginFrame((frameNumber, isLast) => {
56
+ console.log("BRIDGE.onBeginFrame", frameNumber, isLast);
57
+ this.beginFrame(frameNumber, isLast);
58
58
  });
59
59
  }
60
- async initialize(renderId, renderOptions) {
60
+ async initialize(renderOptions) {
61
61
  addEventListener("unhandledrejection", (event) => {
62
62
  this.trace("Unhandled rejection:", event.reason);
63
63
  if (this.BRIDGE) {
64
- this.BRIDGE.error(renderId, event.reason);
64
+ this.BRIDGE.error(event.reason);
65
65
  }
66
66
  });
67
67
  addEventListener("error", (event) => {
68
68
  this.trace("Uncaught error", event.error);
69
69
  if (this.BRIDGE) {
70
- this.BRIDGE.error(renderId, event.error);
70
+ this.BRIDGE.error(event.error);
71
71
  }
72
72
  });
73
73
  this.renderOptions = renderOptions;
@@ -107,9 +107,9 @@ class EfFramegen {
107
107
  // renderOptions.encoderOptions.fromMs,
108
108
  // renderOptions.encoderOptions.toMs,
109
109
  );
110
- log("Initialized");
110
+ console.log("Initialized");
111
111
  }
112
- async beginFrame(renderId, frameNumber, isLast) {
112
+ async beginFrame(frameNumber, isLast) {
113
113
  if (this.renderOptions === void 0) {
114
114
  throw new Error("No renderOptions");
115
115
  }
@@ -132,18 +132,20 @@ class EfFramegen {
132
132
  }
133
133
  this.time = this.renderOptions.encoderOptions.fromMs + frameNumber * this.frameDurationMs;
134
134
  firstGroup.currentTimeMs = this.time;
135
- log("Awaiting initialBusyTasks");
135
+ console.log("Awaiting initialBusyTasks");
136
136
  await this.initialBusyTasks;
137
- log("Awaiting microtask");
137
+ console.log("Awaiting microtask");
138
138
  await new Promise(queueMicrotask);
139
- log("Awaiting frame tasks");
139
+ console.log("Awaiting frame tasks");
140
140
  const now = performance.now();
141
141
  await Promise.all(
142
142
  temporals.filter((temporal) => temporal.frameTask.status < TaskStatus.COMPLETE).map((temporal) => {
143
143
  return temporal.frameTask;
144
144
  }).map((task) => task.taskComplete)
145
145
  );
146
- log(`frame:${frameNumber} All tasks complete ${performance.now() - now}ms`);
146
+ console.log(
147
+ `frame:${frameNumber} All tasks complete ${performance.now() - now}ms`
148
+ );
147
149
  if (isLast && this.audioBufferPromise) {
148
150
  const renderedAudio = await this.audioBufferPromise;
149
151
  const channelCount = renderedAudio.numberOfChannels;
@@ -160,11 +162,7 @@ class EfFramegen {
160
162
  }
161
163
  if (this.BRIDGE) {
162
164
  this.triggerCanvas.trigger();
163
- this.BRIDGE.frameReady(
164
- renderId,
165
- frameNumber,
166
- interleavedSamples.buffer
167
- );
165
+ this.BRIDGE.frameReady(frameNumber, interleavedSamples.buffer);
168
166
  } else {
169
167
  const fileReader = new FileReader();
170
168
  fileReader.readAsDataURL(new Blob([interleavedSamples.buffer]));
@@ -177,7 +175,7 @@ class EfFramegen {
177
175
  } else {
178
176
  if (this.BRIDGE) {
179
177
  this.triggerCanvas.trigger();
180
- this.BRIDGE.frameReady(renderId, frameNumber, new ArrayBuffer(0));
178
+ this.BRIDGE.frameReady(frameNumber, new ArrayBuffer(0));
181
179
  } else {
182
180
  const fileReader = new FileReader();
183
181
  fileReader.readAsDataURL(new Blob([]));