@editframe/elements 0.15.0-beta.1 → 0.15.0-beta.11
- package/dist/EF_FRAMEGEN.js +0 -2
- package/dist/elements/EFAudio.d.ts +0 -1
- package/dist/elements/EFAudio.js +1 -5
- package/dist/elements/EFCaptions.js +1 -1
- package/dist/elements/EFMedia.d.ts +2 -1
- package/dist/elements/EFMedia.js +125 -14
- package/dist/elements/EFTemporal.d.ts +3 -3
- package/dist/elements/EFTemporal.js +6 -2
- package/dist/elements/EFTimegroup.d.ts +1 -5
- package/dist/elements/EFTimegroup.js +4 -5
- package/dist/elements/EFWaveform.d.ts +14 -6
- package/dist/elements/EFWaveform.js +155 -53
- package/dist/elements/TargetController.d.ts +25 -0
- package/dist/elements/TargetController.js +164 -0
- package/dist/elements/TargetController.test.d.ts +19 -0
- package/dist/getRenderInfo.d.ts +51 -0
- package/dist/getRenderInfo.js +72 -0
- package/dist/gui/EFPreview.d.ts +1 -1
- package/dist/gui/EFPreview.js +1 -0
- package/dist/gui/TWMixin.css.js +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.js +4 -1
- package/dist/style.css +3 -0
- package/package.json +11 -5
- package/src/elements/EFAudio.ts +1 -4
- package/src/elements/EFCaptions.ts +1 -1
- package/src/elements/EFMedia.ts +158 -22
- package/src/elements/EFTemporal.ts +10 -10
- package/src/elements/EFTimegroup.ts +4 -9
- package/src/elements/EFWaveform.ts +214 -70
- package/src/elements/TargetController.test.ts +229 -0
- package/src/elements/TargetController.ts +219 -0
- package/src/gui/EFPreview.ts +10 -9
- package/types.json +1 -0
package/dist/EF_FRAMEGEN.js
CHANGED
@@ -104,8 +104,6 @@ class EfFramegen {
     this.audioBufferPromise = firstGroup.renderAudio(
       renderOptions.encoderOptions.alignedFromUs / 1e3,
       renderOptions.encoderOptions.alignedToUs / 1e3
-      // renderOptions.encoderOptions.fromMs,
-      // renderOptions.encoderOptions.toMs,
     );
     console.log("Initialized");
   }
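Note: renderAudio now receives encoder-aligned boundaries converted from microseconds to milliseconds, replacing the commented-out fromMs/toMs arguments. A minimal sketch of the conversion, assuming only the field names visible in the diff (the `EncoderOptions` interface here is hypothetical):

```ts
// Hypothetical shape based on the fields visible in the diff.
interface EncoderOptions {
  alignedFromUs: number; // aligned start boundary, in microseconds
  alignedToUs: number;   // aligned end boundary, in microseconds
}

// renderAudio() is called with milliseconds, so each boundary is divided by 1e3.
function renderAudioArgsMs(opts: EncoderOptions): [fromMs: number, toMs: number] {
  return [opts.alignedFromUs / 1e3, opts.alignedToUs / 1e3];
}
```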
package/dist/elements/EFAudio.d.ts
CHANGED
@@ -2,7 +2,6 @@ import { Task } from '@lit/task';
 import { EFMedia } from './EFMedia.js';
 export declare class EFAudio extends EFMedia {
     audioElementRef: import('lit-html/directives/ref.js').Ref<HTMLAudioElement>;
-    src: string;
     render(): import('lit-html').TemplateResult<1>;
     get audioElement(): HTMLAudioElement | undefined;
     frameTask: Task<readonly [import('@lit/task').TaskStatus, import('@lit/task').TaskStatus, import('@lit/task').TaskStatus, import('@lit/task').TaskStatus, import('@lit/task').TaskStatus], void>;
package/dist/elements/EFAudio.js
CHANGED
@@ -1,6 +1,6 @@
 import { Task } from "@lit/task";
 import { html } from "lit";
-import {
+import { customElement } from "lit/decorators.js";
 import { createRef, ref } from "lit/directives/ref.js";
 import { EFMedia } from "./EFMedia.js";
 var __defProp = Object.defineProperty;
@@ -17,7 +17,6 @@ let EFAudio = class extends EFMedia {
   constructor() {
     super(...arguments);
     this.audioElementRef = createRef();
-    this.src = "";
     this.frameTask = new Task(this, {
       args: () => [
         this.trackFragmentIndexLoader.status,
@@ -43,9 +42,6 @@ let EFAudio = class extends EFMedia {
     return this.audioElementRef.value;
   }
 };
-__decorateClass([
-  property({ type: String })
-], EFAudio.prototype, "src", 2);
EFAudio = __decorateClass([
  customElement("ef-audio")
], EFAudio);
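The removed `__decorateClass` block is the compiled form of a Lit `@property` decorator; `src` is no longer redeclared on EFAudio and instead comes from the mixed-in EFMedia base (see the EFMedia.js diff below). A sketch of the source pattern that compiled output corresponds to, using a hypothetical tag name so it does not clash with `ef-audio`:

```ts
import { LitElement } from "lit";
import { customElement, property } from "lit/decorators.js";

@customElement("ef-audio-sketch") // hypothetical tag, illustrative only
class EFAudioSketch extends LitElement {
  // Declares a reactive attribute/property pair; this is what
  // __decorateClass([property({ type: String })], ..., "src", 2) compiles from.
  @property({ type: String }) src = "";
}
```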
package/dist/elements/EFCaptions.js
CHANGED
@@ -335,7 +335,7 @@ let EFCaptions = class extends EFSourceMixin(
     if (!transcriptionFragment) {
       return;
     }
-    const currentTimeMs = this.targetElement.
+    const currentTimeMs = this.targetElement.currentSourceTimeMs;
     const currentTimeSec = currentTimeMs / 1e3;
     const currentWord = transcriptionFragment.word_segments.find(
       (word) => currentTimeSec >= word.start && currentTimeSec <= word.end
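Captions now resolve the active word against the target's `currentSourceTimeMs` (source-relative time) rather than timeline time. A standalone sketch of the same lookup; the `WordSegment` shape is a hypothetical stand-in for the `word_segments` entries used above:

```ts
// Hypothetical shape of a word_segments entry; start/end are in seconds.
interface WordSegment {
  word: string;
  start: number;
  end: number;
}

// Mirrors the .find() in the diff: the word whose [start, end] window
// contains the source-relative playhead.
function findCurrentWord(
  segments: WordSegment[],
  currentSourceTimeMs: number
): WordSegment | undefined {
  const t = currentSourceTimeMs / 1e3;
  return segments.find((w) => t >= w.start && t <= w.end);
}
```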
package/dist/elements/EFMedia.d.ts
CHANGED
@@ -70,6 +70,7 @@ export declare class EFMedia extends EFMedia_base {
     private static readonly MAX_DB;
     private static readonly DECAY_WEIGHT;
     get FREQ_WEIGHTS(): Float32Array;
-
+    byteTimeDomainTask: Task<readonly [import('@lit/task').TaskStatus, number, number, number], Uint8Array | null>;
+    frequencyDataTask: Task<readonly [import('@lit/task').TaskStatus, number, number, number], Uint8Array | null>;
 }
 export {};
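Both new members are `@lit/task` Tasks keyed on a `TaskStatus` plus three numbers (current source time, fftSize, fftDecay, per the EFMedia.js diff below). A minimal sketch of how such a Task is wired, using the real `@lit/task` API on a hypothetical host element:

```ts
import { LitElement } from "lit";
import { Task } from "@lit/task";

class AnalyserHost extends LitElement {
  fftSize = 512;  // mirrors the defaults in the EFMedia.js diff
  fftDecay = 8;

  // Re-runs whenever any value returned by args() changes; the task's
  // .value holds the last resolved Uint8Array (or null when unavailable).
  dataTask = new Task(this, {
    args: () => [this.fftSize, this.fftDecay] as const,
    task: async ([fftSize, _fftDecay]) => {
      // ...compute and return analyser data (see the EFMedia.js diff)...
      return new Uint8Array(fftSize / 2);
    },
  });
}
```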
package/dist/elements/EFMedia.js
CHANGED
@@ -10,6 +10,7 @@ import { EF_RENDERING } from "../EF_RENDERING.js";
 import { EFSourceMixin } from "./EFSourceMixin.js";
 import { EFTemporal, isEFTemporal } from "./EFTemporal.js";
 import { FetchMixin } from "./FetchMixin.js";
+import { EFTargetable } from "./TargetController.js";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
@@ -40,12 +41,13 @@ class LRUCache {
       this.cache.delete(key);
     } else if (this.cache.size >= this.maxSize) {
       const firstKey = this.cache.keys().next().value;
-
+      if (firstKey) {
+        this.cache.delete(firstKey);
+      }
     }
     this.cache.set(key, value);
   }
 }
-const frequencyDataCache = new LRUCache(100);
 const deepGetMediaElements = (element, medias = []) => {
   for (const child of Array.from(element.children)) {
     if (child instanceof EFMedia) {
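The module-level `frequencyDataCache` is replaced by per-instance private caches (`#byteTimeDomainCache` / `#frequencyDataCache`, declared in the hunks below), and eviction now guards against an `undefined` first key. A self-contained sketch of the same Map-based LRU pattern, assuming only the semantics visible in the diff:

```ts
// Map iteration order is insertion order, so the first key is the
// oldest entry; deleting and re-inserting on set() refreshes recency.
class LRUCache<V> {
  #cache = new Map<string, V>();
  constructor(private maxSize: number) {}

  get(key: string): V | undefined {
    return this.#cache.get(key);
  }

  set(key: string, value: V): void {
    if (this.#cache.has(key)) {
      this.#cache.delete(key);
    } else if (this.#cache.size >= this.maxSize) {
      // keys().next().value is undefined for an empty Map, hence the guard.
      const firstKey = this.#cache.keys().next().value;
      if (firstKey) {
        this.#cache.delete(firstKey);
      }
    }
    this.#cache.set(key, value);
  }
}
```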
@@ -56,9 +58,11 @@ const deepGetMediaElements = (element, medias = []) => {
   }
   return medias;
 };
-const _EFMedia = class _EFMedia2 extends
-
-
+const _EFMedia = class _EFMedia2 extends EFTargetable(
+  EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
+    assetType: "isobmff_files"
+  })
+) {
   constructor() {
     super(...arguments);
     this.currentTimeMs = 0;
@@ -240,16 +244,116 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
     });
     this.fftSize = 512;
     this.fftDecay = 8;
+    this.#byteTimeDomainCache = new LRUCache(100);
+    this.byteTimeDomainTask = new Task(this, {
+      autoRun: EF_INTERACTIVE,
+      args: () => [
+        this.audioBufferTask.status,
+        this.currentSourceTimeMs,
+        this.fftSize,
+        this.fftDecay
+      ],
+      task: async () => {
+        await this.audioBufferTask.taskComplete;
+        if (!this.audioBufferTask.value) return null;
+        if (this.currentSourceTimeMs <= 0) return null;
+        const currentTimeMs = this.currentSourceTimeMs;
+        const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
+        const audioBuffer = this.audioBufferTask.value.buffer;
+        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+        const cachedSmoothedData = this.#byteTimeDomainCache.get(smoothedKey);
+        if (cachedSmoothedData) {
+          return cachedSmoothedData;
+        }
+        const framesData = await Promise.all(
+          Array.from({ length: this.fftDecay }, async (_, i) => {
+            const frameOffset = i * (1e3 / 30);
+            const startTime = Math.max(
+              0,
+              (currentTimeMs - frameOffset - startOffsetMs) / 1e3
+            );
+            const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+            const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
+            if (cachedFrame) {
+              return cachedFrame;
+            }
+            const audioContext = new OfflineAudioContext(
+              2,
+              48e3 * (1 / 30),
+              48e3
+            );
+            const analyser = audioContext.createAnalyser();
+            analyser.fftSize = this.fftSize;
+            const gainNode = audioContext.createGain();
+            gainNode.gain.value = 10;
+            analyser.smoothingTimeConstant = 0.4;
+            analyser.minDecibels = -90;
+            analyser.maxDecibels = -10;
+            const audioBufferSource = audioContext.createBufferSource();
+            audioBufferSource.buffer = audioBuffer;
+            const filter = audioContext.createBiquadFilter();
+            filter.type = "bandpass";
+            filter.frequency.value = 1e3;
+            filter.Q.value = 0.5;
+            audioBufferSource.connect(gainNode);
+            gainNode.connect(filter);
+            filter.connect(analyser);
+            analyser.connect(audioContext.destination);
+            audioBufferSource.start(0, startTime, 1 / 30);
+            try {
+              await audioContext.startRendering();
+              const frameData = new Uint8Array(analyser.fftSize);
+              analyser.getByteTimeDomainData(frameData);
+              this.#byteTimeDomainCache.set(cacheKey, frameData);
+              return frameData;
+            } finally {
+              audioBufferSource.disconnect();
+              analyser.disconnect();
+            }
+          })
+        );
+        const frameLength = framesData[0]?.length ?? 0;
+        const smoothedData = new Uint8Array(frameLength);
+        for (let i = 0; i < frameLength; i++) {
+          let weightedSum = 0;
+          let weightSum = 0;
+          framesData.forEach((frame, frameIndex) => {
+            const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
+            weightedSum += frame[i] * decayWeight;
+            weightSum += decayWeight;
+          });
+          smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
+        }
+        this.#byteTimeDomainCache.set(
+          smoothedKey,
+          smoothedData.slice(0, Math.floor(smoothedData.length * 0.8))
+        );
+        return smoothedData;
+      }
+    });
+    this.#frequencyDataCache = new LRUCache(100);
     this.frequencyDataTask = new Task(this, {
       autoRun: EF_INTERACTIVE,
-      args: () => [
+      args: () => [
+        this.audioBufferTask.status,
+        this.currentSourceTimeMs,
+        this.fftSize,
+        // Add fftSize to dependency array
+        this.fftDecay
+        // Add fftDecay to dependency array
+      ],
       task: async () => {
         await this.audioBufferTask.taskComplete;
         if (!this.audioBufferTask.value) return null;
-        if (this.
-        const currentTimeMs = this.
+        if (this.currentSourceTimeMs <= 0) return null;
+        const currentTimeMs = this.currentSourceTimeMs;
         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
         const audioBuffer = this.audioBufferTask.value.buffer;
+        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+        const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
+        if (cachedSmoothedData) {
+          return cachedSmoothedData;
+        }
         const framesData = await Promise.all(
           Array.from({ length: this.fftDecay }, async (_, i) => {
             const frameOffset = i * (1e3 / 30);
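The new `byteTimeDomainTask` renders one video frame's worth of audio (1/30 s at 48 kHz) through an OfflineAudioContext and reads the analyser afterwards. A stripped-down sketch of that step, under the same assumptions as the diff but with the gain and band-pass stages omitted:

```ts
// Render a single 1/30 s slice of an AudioBuffer offline and return the
// analyser's byte time-domain data for that slice.
async function renderFrameTimeDomain(
  audioBuffer: AudioBuffer,
  startTimeSec: number,
  fftSize = 512
): Promise<Uint8Array> {
  // Offline context sized to exactly one video frame of audio (1600 samples).
  const ctx = new OfflineAudioContext(2, 48000 * (1 / 30), 48000);
  const analyser = ctx.createAnalyser();
  analyser.fftSize = fftSize;
  const source = ctx.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(analyser);
  analyser.connect(ctx.destination);
  // Play only the slice [startTimeSec, startTimeSec + 1/30).
  source.start(0, startTimeSec, 1 / 30);
  await ctx.startRendering();
  // The analyser retains the last block it processed, so it can be read
  // once rendering completes.
  const data = new Uint8Array(analyser.fftSize);
  analyser.getByteTimeDomainData(data);
  return data;
}
```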
@@ -257,8 +361,8 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
               0,
               (currentTimeMs - frameOffset - startOffsetMs) / 1e3
             );
-            const cacheKey = `${startOffsetMs}
-            const cachedFrame = frequencyDataCache.get(cacheKey);
+            const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+            const cachedFrame = this.#frequencyDataCache.get(cacheKey);
             if (cachedFrame) {
               return cachedFrame;
             }
@@ -278,9 +382,9 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
             audioBufferSource.start(0, startTime, 1 / 30);
             try {
               await audioContext.startRendering();
-              const frameData = new Uint8Array(
+              const frameData = new Uint8Array(this.fftSize / 2);
               analyser.getByteFrequencyData(frameData);
-              frequencyDataCache.set(cacheKey, frameData);
+              this.#frequencyDataCache.set(cacheKey, frameData);
               return frameData;
             } finally {
               audioBufferSource.disconnect();
@@ -304,7 +408,12 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
           const freqWeight = this.FREQ_WEIGHTS[i];
           smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
         });
-
+        const slicedData = smoothedData.slice(
+          0,
+          Math.floor(smoothedData.length / 2)
+        );
+        this.#frequencyDataCache.set(smoothedKey, slicedData);
+        return slicedData;
       }
     });
   }
@@ -359,7 +468,7 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
   }
   updated(changedProperties) {
     if (changedProperties.has("ownCurrentTimeMs")) {
-      this.executeSeek(this.
+      this.executeSeek(this.currentSourceTimeMs);
     }
     if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
       const timelineTimeMs = (this.rootTimegroup ?? this).currentTimeMs;
@@ -546,6 +655,8 @@ const _EFMedia = class _EFMedia2 extends EFSourceMixin(EFTemporal(FetchMixin(Lit
     freqWeightsCache.set(this.fftSize, weights);
     return weights;
   }
+  #byteTimeDomainCache;
+  #frequencyDataCache;
 };
 __decorateClass([
   property({ type: Number })
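Both tasks blend the last `fftDecay` frames with exponentially decaying weights (`DECAY_WEIGHT ** frameIndex`). A standalone sketch of that smoothing step; `DECAY_WEIGHT` is private in the package, so the 0.7 default here is an assumed value in (0, 1):

```ts
// Weighted average over past frames: index 0 is "now", higher indices are
// older frames; each older frame contributes decayWeight times less.
function smoothFrames(frames: Uint8Array[], decayWeight = 0.7): Uint8Array {
  const length = frames[0]?.length ?? 0;
  const out = new Uint8Array(length);
  for (let i = 0; i < length; i++) {
    let weightedSum = 0;
    let weightSum = 0;
    frames.forEach((frame, frameIndex) => {
      const w = decayWeight ** frameIndex;
      weightedSum += frame[i] * w;
      weightSum += w;
    });
    out[i] = Math.min(255, Math.round(weightedSum / weightSum));
  }
  return out;
}
```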
package/dist/elements/EFTemporal.d.ts
CHANGED
@@ -135,18 +135,18 @@ export declare class TemporalMixinInterface {
      * elements.
      *
      * For example, if the media has a `sourcein` value of 10s, when `ownCurrentTimeMs` is 0s,
-     * `
+     * `currentSourceTimeMs` will be 10s.
      *
      * sourcein=10s        sourceout=10s
      *        /            /          /
      * |--------|=================|---------|
      *           ^
      *           |_
-     *
+     *              currentSourceTimeMs === 10s
      *           |_
      *              ownCurrentTimeMs === 0s
      */
-    get
+    get currentSourceTimeMs(): number;
     set duration(value: string);
     get duration(): string;
     /**
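The renamed getter maps element-local time into source-media time by adding the `sourcein` trim offset and clamping to the trimmed window (the JS diff below shows the `Math.min` clamp when both trim points are set). A minimal sketch of that relationship, with hypothetical standalone names for the values the mixin exposes:

```ts
// Source-relative playhead: local time shifted by the sourcein trim point,
// clamped so it never runs past sourceout.
function currentSourceTimeMs(
  ownCurrentTimeMs: number,
  sourceInMs: number,
  sourceOutMs: number
): number {
  return Math.min(sourceOutMs, sourceInMs + ownCurrentTimeMs);
}

// Per the doc comment: sourcein = 10s and ownCurrentTimeMs = 0s give 10s.
console.log(currentSourceTimeMs(0, 10_000, 20_000)); // 10000
```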
package/dist/elements/EFTemporal.js
CHANGED
@@ -278,6 +278,10 @@ const EFTemporal = (superClass) => {
     get endTimeMs() {
       return this.startTimeMs + this.durationMs;
     }
+    /**
+     * The current time of the element within itself.
+     * Compare with `currentTimeMs` to see the current time with respect to the root timegroup
+     */
     get ownCurrentTimeMs() {
       if (this.rootTimegroup) {
         return Math.min(
@@ -291,7 +295,7 @@ const EFTemporal = (superClass) => {
     * Used to calculate the internal currentTimeMs of the element. This is useful
     * for mapping to internal media time codes for audio/video elements.
     */
-    get
+    get currentSourceTimeMs() {
      if (this.rootTimegroup) {
        if (this.sourceInMs && this.sourceOutMs) {
          return Math.min(
@@ -316,7 +320,7 @@ const EFTemporal = (superClass) => {
       super.updated(changedProperties);
       if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
         const timelineTimeMs = (this.rootTimegroup ?? this).ownCurrentTimeMs;
-        if (this.startTimeMs
+        if (this.startTimeMs > timelineTimeMs || this.endTimeMs < timelineTimeMs) {
           this.style.display = "none";
           return;
         }
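The reconstructed condition in `updated()` hides a temporal element whenever the timeline playhead falls outside its `[startTimeMs, endTimeMs]` window. As a standalone predicate (the names come from the diff; the function itself is illustrative):

```ts
// True when the playhead is outside the element's active window,
// i.e. the element should be hidden with display: none.
function isOutsideActiveWindow(
  startTimeMs: number,
  endTimeMs: number,
  timelineTimeMs: number
): boolean {
  return startTimeMs > timelineTimeMs || endTimeMs < timelineTimeMs;
}
```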
package/dist/elements/EFTimegroup.d.ts
CHANGED
@@ -27,11 +27,7 @@ export declare class EFTimegroup extends EFTimegroup_base {
      * that caused issues with constructing audio data. We had negative durations
      * in calculations and it was not clear why.
      */
-    waitForMediaDurations(): Promise<(
-        trackId: string;
-        buffer: import('mp4box').MP4ArrayBuffer;
-        mp4File: import('../../../assets/src/MP4File.ts').MP4File;
-    }[] | undefined)[]>;
+    waitForMediaDurations(): Promise<Record<number, import('../../../assets/src/index.ts').TrackFragmentIndex>[]>;
     get childTemporals(): import('./EFTemporal.js').TemporalMixinInterface[];
     protected updated(changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>): void;
     private updateAnimations;
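The return-type change reflects the JS-side switch from awaiting each media's `initSegmentsLoader` to awaiting its `trackFragmentIndexLoader` (see the EFTimegroup.js hunk below). A hedged sketch of the gathering pattern, with a hypothetical loader shape standing in for the real `@lit/task` Task:

```ts
// Hypothetical minimal shape of the per-media loader Task used below;
// taskComplete resolves once the track fragment index has loaded.
interface FragmentIndexLoader<T> {
  taskComplete: Promise<T>;
}

// Mirrors waitForMediaDurations(): gather every media element's index
// before audio durations are computed, avoiding the negative-duration
// issue the doc comment describes.
async function waitForAll<T>(loaders: FragmentIndexLoader<T>[]): Promise<T[]> {
  return Promise.all(loaders.map((l) => l.taskComplete));
}
```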
package/dist/elements/EFTimegroup.js
CHANGED
@@ -45,7 +45,7 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
     __privateAdd(this, _EFTimegroup_instances);
     this._timeGroupContext = this;
     __privateAdd(this, _currentTime, 0);
-    this.mode = "
+    this.mode = "contain";
     this.overlapMs = 0;
     this.fit = "none";
     __privateAdd(this, _resizeObserver);
@@ -186,10 +186,9 @@ let EFTimegroup = class extends EFTemporal(LitElement) {
    * in calculations and it was not clear why.
    */
   async waitForMediaDurations() {
+    const mediaElements = deepGetMediaElements(this);
     return await Promise.all(
-
-      (media) => media.initSegmentsLoader.taskComplete
-    )
+      mediaElements.map((m) => m.trackFragmentIndexLoader.taskComplete)
     );
   }
   get childTemporals() {
@@ -402,7 +401,7 @@ __decorateClass([
   property({ type: String })
 ], EFTimegroup.prototype, "fit", 2);
 __decorateClass([
-  property({ type: Number })
+  property({ type: Number, attribute: "currenttime" })
 ], EFTimegroup.prototype, "currentTime", 1);
 EFTimegroup = __decorateClass([
   customElement("ef-timegroup")
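With the explicit `attribute: "currenttime"` mapping (HTML attribute names are case-insensitive, so the lowercase form is what appears in markup), the group's playhead can be driven from the DOM. A usage sketch; the element and value here are illustrative:

```ts
// Seeks the whole group to 1500 ms; the attribute is parsed as a Number
// and feeds the currentTime accessor on the element.
const group = document.createElement("ef-timegroup");
group.setAttribute("currenttime", "1500");
document.body.appendChild(group);
```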
package/dist/elements/EFWaveform.d.ts
CHANGED
@@ -1,8 +1,9 @@
-import { EFAudio } from './EFAudio.js';
 import { Task } from '@lit/task';
 import { LitElement, PropertyValueMap } from 'lit';
 import { Ref } from 'lit/directives/ref.js';
+import { EFAudio } from './EFAudio.js';
 import { EFVideo } from './EFVideo.js';
+import { TargetController } from './TargetController.ts';
 declare const EFWaveform_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
 export declare class EFWaveform extends EFWaveform_base {
     static styles: import('lit').CSSResult;
@@ -12,11 +13,12 @@ export declare class EFWaveform extends EFWaveform_base {
     private resizeObserver?;
     private mutationObserver?;
     render(): import('lit-html').TemplateResult<1>;
-    mode: "roundBars" | "bars" | "bricks" | "line" | "pixel" | "wave";
+    mode: "roundBars" | "bars" | "bricks" | "line" | "curve" | "pixel" | "wave" | "spikes";
     color: string;
-
+    target: string;
+    targetElement: EFAudio | EFVideo | null;
     lineWidth: number;
-
+    targetController: TargetController;
     connectedCallback(): void;
     disconnectedCallback(): void;
     private resizeCanvas;
@@ -26,11 +28,17 @@ export declare class EFWaveform extends EFWaveform_base {
     protected drawRoundBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
     protected drawEqualizer(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
     protected drawLine(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
+    protected drawCurve(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
     protected drawPixel(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
     protected drawWave(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
-
+    protected drawSpikes(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
+    frameTask: Task<readonly [EFAudio | EFVideo | null, Uint8Array | null | undefined], void>;
     get durationMs(): number;
-    get targetElement(): EFAudio | EFVideo | null;
     protected updated(changedProperties: PropertyValueMap<this>): void;
 }
+declare global {
+    interface HTMLElementTagNameMap {
+        "ef-waveform": EFWaveform & Element;
+    }
+}
 export {};
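The new `declare global` block registers `ef-waveform` in `HTMLElementTagNameMap`, so `document.createElement` and `querySelector` resolve to the EFWaveform type without casts when the package's types are loaded. A usage sketch; the `mode` value is one of the union members above, and the `target` value is assumed to identify the media element the new TargetController resolves:

```ts
// With the tag map in scope, this is typed as EFWaveform, not HTMLElement.
const waveform = document.createElement("ef-waveform");
waveform.mode = "spikes";      // type-checked against the mode union
waveform.target = "my-audio";  // assumed reference to the target media element
document.body.appendChild(waveform);
```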