@editframe/elements 0.15.0-beta.8 → 0.16.0-beta.0
- package/dist/EF_FRAMEGEN.d.ts +14 -10
- package/dist/EF_FRAMEGEN.js +17 -28
- package/dist/elements/EFCaptions.js +0 -7
- package/dist/elements/EFImage.js +0 -4
- package/dist/elements/EFMedia.d.ts +13 -7
- package/dist/elements/EFMedia.js +217 -111
- package/dist/elements/EFSourceMixin.js +2 -1
- package/dist/elements/EFTemporal.browsertest.d.ts +4 -3
- package/dist/elements/EFTemporal.d.ts +14 -11
- package/dist/elements/EFTemporal.js +63 -87
- package/dist/elements/EFTimegroup.d.ts +2 -4
- package/dist/elements/EFTimegroup.js +15 -103
- package/dist/elements/EFVideo.js +3 -1
- package/dist/elements/EFWaveform.d.ts +3 -2
- package/dist/elements/EFWaveform.js +39 -26
- package/dist/elements/durationConverter.d.ts +8 -8
- package/dist/elements/durationConverter.js +2 -2
- package/dist/elements/updateAnimations.d.ts +9 -0
- package/dist/elements/updateAnimations.js +62 -0
- package/dist/getRenderInfo.d.ts +51 -0
- package/dist/getRenderInfo.js +72 -0
- package/dist/gui/EFFilmstrip.js +7 -16
- package/dist/gui/EFFitScale.d.ts +27 -0
- package/dist/gui/EFFitScale.js +138 -0
- package/dist/gui/EFWorkbench.d.ts +2 -5
- package/dist/gui/EFWorkbench.js +13 -56
- package/dist/gui/TWMixin.css.js +1 -1
- package/dist/gui/TWMixin.js +14 -2
- package/dist/index.d.ts +2 -0
- package/dist/index.js +6 -1
- package/dist/style.css +6 -3
- package/package.json +9 -4
- package/src/elements/EFCaptions.browsertest.ts +2 -2
- package/src/elements/EFCaptions.ts +0 -7
- package/src/elements/EFImage.browsertest.ts +2 -2
- package/src/elements/EFImage.ts +0 -4
- package/src/elements/EFMedia.browsertest.ts +14 -14
- package/src/elements/EFMedia.ts +291 -136
- package/src/elements/EFSourceMixin.ts +4 -4
- package/src/elements/EFTemporal.browsertest.ts +64 -31
- package/src/elements/EFTemporal.ts +99 -119
- package/src/elements/EFTimegroup.ts +15 -133
- package/src/elements/EFVideo.ts +3 -1
- package/src/elements/EFWaveform.ts +54 -39
- package/src/elements/durationConverter.ts +9 -4
- package/src/elements/updateAnimations.ts +88 -0
- package/src/gui/ContextMixin.ts +0 -3
- package/src/gui/EFFilmstrip.ts +7 -16
- package/src/gui/EFFitScale.ts +152 -0
- package/src/gui/EFWorkbench.ts +18 -65
- package/src/gui/TWMixin.ts +19 -2
- package/types.json +1 -1
package/dist/elements/EFMedia.js
CHANGED

@@ -6,11 +6,11 @@ import { property, state } from "lit/decorators.js";
 import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
 import { MP4File } from "@editframe/assets/MP4File.js";
 import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
-import { EF_RENDERING } from "../EF_RENDERING.js";
 import { EFSourceMixin } from "./EFSourceMixin.js";
-import { EFTemporal
+import { EFTemporal } from "./EFTemporal.js";
 import { FetchMixin } from "./FetchMixin.js";
 import { EFTargetable } from "./TargetController.js";
+import { updateAnimations } from "./updateAnimations.js";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __decorateClass = (decorators, target, key, kind) => {
@@ -70,12 +70,20 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
     this.trackFragmentIndexLoader = new Task(this, {
       args: () => [this.fragmentIndexPath(), this.fetch],
       task: async ([fragmentIndexPath, fetch], { signal }) => {
-
-
+        try {
+          const response = await fetch(fragmentIndexPath, { signal });
+          return await response.json();
+        } catch (error) {
+          log("Failed to load track fragment index", error);
+          return void 0;
+        }
       },
       onComplete: () => {
+        this.requestUpdate("intrinsicDurationMs");
         this.requestUpdate("ownCurrentTimeMs");
+        console.log("Requesting update for durationMs", this, this.rootTimegroup);
         this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+        this.rootTimegroup?.requestUpdate("durationMs");
       }
     });
     this.initSegmentsLoader = new Task(this, {
@@ -242,8 +250,100 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        };
      }
    });
-    this
-    this.
+    this.#byteTimeDomainCache = new LRUCache(100);
+    this.byteTimeDomainTask = new Task(this, {
+      autoRun: EF_INTERACTIVE,
+      args: () => [
+        this.audioBufferTask.status,
+        this.currentSourceTimeMs,
+        this.fftSize,
+        this.fftDecay,
+        this.fftGain,
+        this.shouldInterpolateFrequencies
+      ],
+      task: async () => {
+        await this.audioBufferTask.taskComplete;
+        if (!this.audioBufferTask.value) return null;
+        if (this.currentSourceTimeMs <= 0) return null;
+        const currentTimeMs = this.currentSourceTimeMs;
+        const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
+        const audioBuffer = this.audioBufferTask.value.buffer;
+        const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+        const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+        if (cachedData) return cachedData;
+        const framesData = await Promise.all(
+          Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+            const frameOffset = frameIndex * (1e3 / 30);
+            const startTime = Math.max(
+              0,
+              (currentTimeMs - frameOffset - startOffsetMs) / 1e3
+            );
+            const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
+            const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
+            if (cachedFrame) return cachedFrame;
+            const audioContext = new OfflineAudioContext(
+              2,
+              48e3 * (1 / 30),
+              48e3
+            );
+            const source = audioContext.createBufferSource();
+            source.buffer = audioBuffer;
+            const analyser = audioContext.createAnalyser();
+            analyser.fftSize = this.fftSize;
+            analyser.minDecibels = -90;
+            analyser.maxDecibels = -20;
+            const gainNode = audioContext.createGain();
+            gainNode.gain.value = this.fftGain;
+            source.connect(gainNode);
+            gainNode.connect(analyser);
+            analyser.connect(audioContext.destination);
+            source.start(0, startTime, 1 / 30);
+            const dataLength = analyser.fftSize / 2;
+            try {
+              await audioContext.startRendering();
+              const frameData = new Uint8Array(dataLength);
+              analyser.getByteTimeDomainData(frameData);
+              const points = new Uint8Array(dataLength);
+              for (let i = 0; i < dataLength; i++) {
+                const pointSamples = frameData.slice(
+                  i * (frameData.length / dataLength),
+                  (i + 1) * (frameData.length / dataLength)
+                );
+                const rms = Math.sqrt(
+                  pointSamples.reduce((sum, sample) => {
+                    const normalized = (sample - 128) / 128;
+                    return sum + normalized * normalized;
+                  }, 0) / pointSamples.length
+                );
+                const avgSign = Math.sign(
+                  pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
+                );
+                points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+              }
+              this.#byteTimeDomainCache.set(cacheKey, points);
+              return points;
+            } finally {
+              source.disconnect();
+              analyser.disconnect();
+            }
+          })
+        );
+        const frameLength = framesData[0]?.length ?? 0;
+        const smoothedData = new Uint8Array(frameLength);
+        for (let i = 0; i < frameLength; i++) {
+          let weightedSum = 0;
+          let weightSum = 0;
+          framesData.forEach((frame, frameIndex) => {
+            const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
+            weightedSum += (frame[i] ?? 0) * decayWeight;
+            weightSum += decayWeight;
+          });
+          smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
+        }
+        this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
+        return smoothedData;
+      }
+    });
     this.#frequencyDataCache = new LRUCache(100);
     this.frequencyDataTask = new Task(this, {
       autoRun: EF_INTERACTIVE,
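The new `byteTimeDomainTask` above reduces each 1/30 s analysis window to a signed RMS byte (centered at 128) and then blends the most recent `fftDecay` frames with an exponential decay weight. A standalone sketch of that reduction, using the `DECAY_WEIGHT = 0.7` constant declared later in this file; the helper names here are illustrative and are not part of the package:

```ts
// Collapse one window of byte time-domain samples (centered at 128) into a
// single signed RMS byte, as the per-point loop in the task above does.
function signedRmsByte(samples: Uint8Array): number {
  let sumSquares = 0;
  let sumSigned = 0;
  for (const sample of samples) {
    const normalized = (sample - 128) / 128;
    sumSquares += normalized * normalized;
    sumSigned += sample - 128;
  }
  const rms = Math.sqrt(sumSquares / samples.length);
  return Math.min(255, Math.round(128 + Math.sign(sumSigned) * rms * 128));
}

// Blend one bin across successive frames (index 0 = newest) with decay weights.
function decayBlend(valuesNewestFirst: number[], decayWeight = 0.7): number {
  let weightedSum = 0;
  let weightSum = 0;
  valuesNewestFirst.forEach((value, frameIndex) => {
    const w = decayWeight ** frameIndex;
    weightedSum += value * w;
    weightSum += w;
  });
  return Math.min(255, Math.round(weightedSum / weightSum));
}

console.log(decayBlend([200, 140, 128])); // newest frame dominates: ≈ 165
```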
@@ -251,9 +351,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
         this.audioBufferTask.status,
         this.currentSourceTimeMs,
         this.fftSize,
-
-        this.
-
+        this.fftDecay,
+        this.fftGain,
+        this.shouldInterpolateFrequencies
       ],
       task: async () => {
         await this.audioBufferTask.taskComplete;
@@ -262,7 +362,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
         const currentTimeMs = this.currentSourceTimeMs;
         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
         const audioBuffer = this.audioBufferTask.value.buffer;
-        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+        const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
         const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
         if (cachedSmoothedData) {
           return cachedSmoothedData;
@@ -274,7 +374,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
              0,
              (currentTimeMs - frameOffset - startOffsetMs) / 1e3
            );
-            const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+            const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
            const cachedFrame = this.#frequencyDataCache.get(cacheKey);
            if (cachedFrame) {
              return cachedFrame;
@@ -286,11 +386,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
            );
            const analyser = audioContext.createAnalyser();
            analyser.fftSize = this.fftSize;
-            analyser.minDecibels =
-            analyser.maxDecibels =
+            analyser.minDecibels = -90;
+            analyser.maxDecibels = -10;
+            const gainNode = audioContext.createGain();
+            gainNode.gain.value = this.fftGain;
+            const filter = audioContext.createBiquadFilter();
+            filter.type = "bandpass";
+            filter.frequency.value = 15e3;
+            filter.Q.value = 0.05;
            const audioBufferSource = audioContext.createBufferSource();
            audioBufferSource.buffer = audioBuffer;
-            audioBufferSource.connect(
+            audioBufferSource.connect(filter);
+            filter.connect(gainNode);
+            gainNode.connect(analyser);
            analyser.connect(audioContext.destination);
            audioBufferSource.start(0, startTime, 1 / 30);
            try {
@@ -325,8 +433,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
          0,
          Math.floor(smoothedData.length / 2)
        );
-        this
-
+        const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
+        this.#frequencyDataCache.set(smoothedKey, processedData);
+        return processedData;
      }
    });
  }
@@ -350,18 +459,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
   }
   fragmentIndexPath() {
     if (this.assetId) {
-      if (EF_RENDERING()) {
-        return `editframe://api/v1/isobmff_files/${this.assetId}/index`;
-      }
       return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
     }
     return `/@ef-track-fragment-index/${this.src ?? ""}`;
   }
   fragmentTrackPath(trackId) {
     if (this.assetId) {
-      if (EF_RENDERING()) {
-        return `editframe://api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
-      }
       return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
     }
     return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
@@ -384,71 +487,13 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
       this.executeSeek(this.currentSourceTimeMs);
     }
     if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
-
-      if (this.startTimeMs > timelineTimeMs || this.endTimeMs < timelineTimeMs) {
-        this.style.display = "none";
-        return;
-      }
-      this.style.display = "";
-      const animations = this.getAnimations({ subtree: true });
-      this.style.setProperty("--ef-duration", `${this.durationMs}ms`);
-      this.style.setProperty(
-        "--ef-transition-duration",
-        `${this.parentTimegroup?.overlapMs ?? 0}ms`
-      );
-      this.style.setProperty(
-        "--ef-transition-out-start",
-        `${this.durationMs - (this.parentTimegroup?.overlapMs ?? 0)}ms`
-      );
-      for (const animation of animations) {
-        if (animation.playState === "running") {
-          animation.pause();
-        }
-        const effect = animation.effect;
-        if (!(effect && effect instanceof KeyframeEffect)) {
-          return;
-        }
-        const target = effect.target;
-        if (!target) {
-          return;
-        }
-        if (target.closest("ef-video, ef-audio") !== this) {
-          return;
-        }
-        if (isEFTemporal(target)) {
-          const timing = effect.getTiming();
-          const duration = Number(timing.duration) ?? 0;
-          const delay = Number(timing.delay);
-          const newTime = Math.floor(
-            Math.min(target.ownCurrentTimeMs, duration - 1 + delay)
-          );
-          if (Number.isNaN(newTime)) {
-            return;
-          }
-          animation.currentTime = newTime;
-        } else if (target) {
-          const nearestTimegroup = target.closest("ef-timegroup");
-          if (!nearestTimegroup) {
-            return;
-          }
-          const timing = effect.getTiming();
-          const duration = Number(timing.duration) ?? 0;
-          const delay = Number(timing.delay);
-          const newTime = Math.floor(
-            Math.min(nearestTimegroup.ownCurrentTimeMs, duration - 1 + delay)
-          );
-          if (Number.isNaN(newTime)) {
-            return;
-          }
-          animation.currentTime = newTime;
-        }
-      }
+      updateAnimations(this);
     }
   }
   get hasOwnDuration() {
     return true;
   }
-  get
+  get intrinsicDurationMs() {
     if (!this.trackFragmentIndexLoader.value) {
       return 0;
     }
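The long inline loop removed above (pause each running animation, then pin `animation.currentTime` to the owning element's clock) has been replaced by a call into the new `updateAnimations.js` module, whose body is not shown in this excerpt. A condensed sketch of that seek pattern, based on the removed lines (with the no-op `?? 0` after `Number(timing.duration)` tightened to `|| 0`), not on the new module:

```ts
// Seek a single Web Animation to a clock value, as the removed loop did per animation.
function seekAnimation(animation: Animation, ownCurrentTimeMs: number): void {
  if (animation.playState === "running") {
    animation.pause();
  }
  const effect = animation.effect;
  if (!(effect instanceof KeyframeEffect)) return;
  const timing = effect.getTiming();
  const duration = Number(timing.duration) || 0;
  const delay = Number(timing.delay);
  // Clamp to just before the end so the animation holds its final frame.
  const newTime = Math.floor(Math.min(ownCurrentTimeMs, duration - 1 + delay));
  if (!Number.isNaN(newTime)) {
    animation.currentTime = newTime;
  }
}
```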
@@ -460,30 +505,18 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
     if (durations.length === 0) {
       return 0;
     }
-
-      return Math.max(this.sourceOutMs - this.sourceInMs);
-    }
-    if (this.sourceInMs) {
-      return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceInMs;
-    }
-    if (this.sourceOutMs) {
-      return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs;
-    }
-    if (this.sourceInMs && this.sourceOutMs) {
-      return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs - this.sourceInMs;
-    }
-    return Math.max(...durations) - this.trimStartMs - this.trimEndMs;
+    return Math.max(...durations);
   }
   #audioContext;
   async fetchAudioSpanningTime(fromMs, toMs) {
     if (this.sourceInMs) {
-      fromMs -= this.startTimeMs - this.trimStartMs - this.sourceInMs;
+      fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
     }
     if (this.sourceOutMs) {
-      toMs -= this.startTimeMs - this.trimStartMs - this.sourceOutMs;
+      toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
     }
-    fromMs -= this.startTimeMs - this.trimStartMs;
-    toMs -= this.startTimeMs - this.trimStartMs;
+    fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
+    toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
     await this.trackFragmentIndexLoader.taskComplete;
     const audioTrackId = this.defaultAudioTrackId;
     if (!audioTrackId) {
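Several of the arithmetic sites above now wrap optional trim/source values in `(… ?? 0)`, which matches the `number | undefined` signatures in the EFTemporal.d.ts hunks at the end of this diff. A tiny illustration of what the guard prevents (the values are made up for the example):

```ts
const startTimeMs = 1_000;
const trimStartMs: number | undefined = undefined; // "trimstart" attribute not set
// Subtracting an undefined trim would yield NaN; the nullish fallback keeps the math finite.
console.log(startTimeMs - (trimStartMs ?? 0)); // 1000
```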
@@ -537,15 +570,39 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
     });
     return {
       blob: audioBlob,
-      startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - this.trimStartMs,
-      endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - this.trimEndMs
+      startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
+      endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
     };
   }
-
-
+  set fftSize(value) {
+    const oldValue = this.fftSize;
+    this.setAttribute("fft-size", String(value));
+    this.requestUpdate("fft-size", oldValue);
   }
-
-
+  set fftDecay(value) {
+    const oldValue = this.fftDecay;
+    this.setAttribute("fft-decay", String(value));
+    this.requestUpdate("fft-decay", oldValue);
+  }
+  get fftSize() {
+    return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+  }
+  get fftDecay() {
+    return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+  }
+  set interpolateFrequencies(value) {
+    const oldValue = this.interpolateFrequencies;
+    this.setAttribute("interpolate-frequencies", String(value));
+    this.requestUpdate("interpolate-frequencies", oldValue);
+  }
+  get interpolateFrequencies() {
+    return this.getAttribute("interpolate-frequencies") !== "false";
+  }
+  get shouldInterpolateFrequencies() {
+    if (this.hasAttribute("interpolate-frequencies")) {
+      return this.getAttribute("interpolate-frequencies") !== "false";
+    }
+    return false;
   }
   static {
     this.DECAY_WEIGHT = 0.7;
@@ -568,7 +625,16 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
     freqWeightsCache.set(this.fftSize, weights);
     return weights;
   }
+  #byteTimeDomainCache;
   #frequencyDataCache;
+  set fftGain(value) {
+    const oldValue = this.fftGain;
+    this.setAttribute("fft-gain", String(value));
+    this.requestUpdate("fft-gain", oldValue);
+  }
+  get fftGain() {
+    return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
+  }
 };
 __decorateClass([
   property({ type: Number })
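The decorated `fftSize`/`fftDecay` properties are removed in the next hunk in favor of the attribute-backed accessors shown above. A small usage sketch of the defaults those getters fall back to when no attributes are present; the `ef-audio` tag name is taken from selectors elsewhere in this diff, and the cast exists only for the sketch:

```ts
// Sketch: reading the attribute-backed FFT settings with no fft-* attributes set.
const media = document.querySelector("ef-audio") as
  | (HTMLElement & {
      fftSize: number;
      fftDecay: number;
      fftGain: number;
      shouldInterpolateFrequencies: boolean;
    })
  | null;

if (media) {
  console.log(media.fftSize);                      // 128 ("fft-size" absent)
  console.log(media.fftDecay);                     // 8   ("fft-decay" absent)
  console.log(media.fftGain);                      // 3.0 ("fft-gain" absent)
  console.log(media.shouldInterpolateFrequencies); // false until "interpolate-frequencies" is set
  media.setAttribute("fft-gain", "2.5");           // setters and attributes stay in sync
}
```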
@@ -579,13 +645,53 @@ __decorateClass([
 __decorateClass([
   state()
 ], _EFMedia.prototype, "desiredSeekTimeMs", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftSize", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftDecay", 2);
 let EFMedia = _EFMedia;
+function processFFTData(fftData, zeroThresholdPercent = 0.1) {
+  const totalBins = fftData.length;
+  const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+  let zeroCount = 0;
+  let cutoffIndex = totalBins;
+  for (let i = totalBins - 1; i >= 0; i--) {
+    if (fftData[i] < 10) {
+      zeroCount++;
+    } else {
+      if (zeroCount >= zeroThresholdCount) {
+        cutoffIndex = i + 1;
+        break;
+      }
+    }
+  }
+  if (cutoffIndex < zeroThresholdCount) {
+    return fftData;
+  }
+  const goodData = fftData.slice(0, cutoffIndex);
+  const resampledData = interpolateData(goodData, fftData.length);
+  const attenuationStartIndex = Math.floor(totalBins * 0.9);
+  for (let i = attenuationStartIndex; i < totalBins; i++) {
+    const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + 0.2;
+    const attenuationFactor = Math.max(0, 1 - attenuationProgress);
+    resampledData[i] = Math.floor(resampledData[i] * attenuationFactor);
+  }
+  return resampledData;
+}
+function interpolateData(data, targetSize) {
+  const resampled = new Uint8Array(targetSize);
+  const dataLength = data.length;
+  for (let i = 0; i < targetSize; i++) {
+    const ratio = i / (targetSize - 1) * (dataLength - 1);
+    const index = Math.floor(ratio);
+    const fraction = ratio - index;
+    if (index >= dataLength - 1) {
+      resampled[i] = data[dataLength - 1];
+    } else {
+      resampled[i] = Math.round(
+        // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+        data[index] * (1 - fraction) + data[index + 1] * fraction
+      );
+    }
+  }
+  return resampled;
+}
 export {
   EFMedia,
   deepGetMediaElements
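`interpolateData` above is a plain linear resampler: it maps each output bin onto a fractional position in the source array and blends the two neighbouring bins. It is module-internal, so the re-statement below uses a local name; the worked example shows the endpoints being preserved:

```ts
// Local re-statement of the linear resampling used by interpolateData above.
function resampleLinear(data: Uint8Array, targetSize: number): Uint8Array {
  const out = new Uint8Array(targetSize);
  for (let i = 0; i < targetSize; i++) {
    const ratio = (i / (targetSize - 1)) * (data.length - 1);
    const index = Math.floor(ratio);
    const fraction = ratio - index;
    out[i] =
      index >= data.length - 1
        ? data[data.length - 1]
        : Math.round(data[index] * (1 - fraction) + data[index + 1] * fraction);
  }
  return out;
}

// Three bins stretched to five: endpoints kept, midpoints linearly blended.
console.log(resampleLinear(new Uint8Array([0, 100, 200]), 5)); // [0, 50, 100, 150, 200]
```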

package/dist/elements/EFSourceMixin.js
CHANGED

@@ -25,7 +25,8 @@ function EFSourceMixin(superClass, options) {
     });
   }
   get apiHost() {
-
+    const apiHost = this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost;
+    return apiHost || "https://editframe.dev";
   }
   productionSrc() {
     if (!this.md5SumLoader.value) {
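The rewritten `apiHost` getter above now climbs to the nearest `ef-configuration`, `ef-workbench`, or `ef-preview` ancestor and only then falls back to the public host. A small sketch of the same resolution order in isolation (the wrapper function and the property cast are illustrative):

```ts
// Mirrors the lookup order in the added apiHost getter above.
type HasApiHost = Element & { apiHost?: string };

function resolveApiHost(el: Element): string {
  const apiHost =
    (el.closest("ef-configuration") as HasApiHost | null)?.apiHost ??
    (el.closest("ef-workbench") as HasApiHost | null)?.apiHost ??
    (el.closest("ef-preview") as HasApiHost | null)?.apiHost;
  return apiHost || "https://editframe.dev";
}
```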

package/dist/elements/EFTemporal.browsertest.d.ts
CHANGED

@@ -1,10 +1,11 @@
 import { LitElement } from 'lit';
-declare const
-declare class
+declare const TenSeconds_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
+declare class TenSeconds extends TenSeconds_base {
+    get intrinsicDurationMs(): number;
 }
 declare global {
     interface HTMLElementTagNameMap {
-        "
+        "ten-seconds": TenSeconds;
     }
 }
 export {};

package/dist/elements/EFTemporal.d.ts
CHANGED

@@ -10,6 +10,7 @@ export declare class TemporalMixinInterface {
      * Whether the element has a duration set as an attribute.
      */
     get hasExplicitDuration(): boolean;
+    get sourceStartMs(): number;
     /**
      * Used to trim the start of the media.
      *
@@ -19,7 +20,7 @@ export declare class TemporalMixinInterface {
      *
      * @domAttribute "trimstart"
      */
-    get trimStartMs(): number;
+    get trimStartMs(): number | undefined;
     /**
      * Used to trim the end of the media.
      *
@@ -30,10 +31,10 @@ export declare class TemporalMixinInterface {
      * @domAttribute "trimend"
      */
     get trimEndMs(): number;
-    set trimStartMs(value: number);
-    set trimEndMs(value: number);
-    set trimstart(value: string);
-    set trimend(value: string);
+    set trimStartMs(value: number | undefined);
+    set trimEndMs(value: number | undefined);
+    set trimstart(value: string | undefined);
+    set trimend(value: string | undefined);
     /**
      * The source in time of the element.
      *
@@ -53,7 +54,7 @@ export declare class TemporalMixinInterface {
      *
      * @domAttribute "sourcein"
      */
-    get sourceInMs(): number;
+    get sourceInMs(): number | undefined;
     /**
      * The source out time of the element.
      *
@@ -75,15 +76,17 @@ export declare class TemporalMixinInterface {
      *
      * @domAttribute "sourceout"
      */
-    get sourceOutMs(): number;
-    set sourceInMs(value: number);
-    set sourceOutMs(value: number);
-    set sourcein(value: string);
-    set sourceout(value: string);
+    get sourceOutMs(): number | undefined;
+    set sourceInMs(value: number | undefined);
+    set sourceOutMs(value: number | undefined);
+    set sourcein(value: string | undefined);
+    set sourceout(value: string | undefined);
     /**
      * @domAttribute "duration"
      */
     get durationMs(): number;
+    get explicitDurationMs(): number | undefined;
+    get intrinsicDurationMs(): number | undefined;
     /**
      * The start time of the element within its root timegroup in milliseconds.
      *