@editframe/elements 0.15.0-beta.9 → 0.16.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/EF_FRAMEGEN.d.ts +14 -10
  2. package/dist/EF_FRAMEGEN.js +17 -28
  3. package/dist/elements/EFCaptions.js +0 -7
  4. package/dist/elements/EFImage.js +0 -4
  5. package/dist/elements/EFMedia.d.ts +13 -8
  6. package/dist/elements/EFMedia.js +164 -146
  7. package/dist/elements/EFSourceMixin.js +2 -1
  8. package/dist/elements/EFTemporal.browsertest.d.ts +4 -3
  9. package/dist/elements/EFTemporal.d.ts +14 -11
  10. package/dist/elements/EFTemporal.js +63 -87
  11. package/dist/elements/EFTimegroup.d.ts +2 -4
  12. package/dist/elements/EFTimegroup.js +15 -103
  13. package/dist/elements/EFVideo.js +3 -1
  14. package/dist/elements/EFWaveform.d.ts +1 -1
  15. package/dist/elements/EFWaveform.js +11 -28
  16. package/dist/elements/durationConverter.d.ts +8 -8
  17. package/dist/elements/durationConverter.js +2 -2
  18. package/dist/elements/updateAnimations.d.ts +9 -0
  19. package/dist/elements/updateAnimations.js +62 -0
  20. package/dist/getRenderInfo.d.ts +51 -0
  21. package/dist/getRenderInfo.js +72 -0
  22. package/dist/gui/EFFilmstrip.js +7 -16
  23. package/dist/gui/EFFitScale.d.ts +27 -0
  24. package/dist/gui/EFFitScale.js +138 -0
  25. package/dist/gui/EFWorkbench.d.ts +2 -5
  26. package/dist/gui/EFWorkbench.js +13 -56
  27. package/dist/gui/TWMixin.css.js +1 -1
  28. package/dist/gui/TWMixin.js +14 -2
  29. package/dist/index.d.ts +2 -0
  30. package/dist/index.js +6 -1
  31. package/dist/style.css +3 -3
  32. package/package.json +4 -3
  33. package/src/elements/EFCaptions.browsertest.ts +2 -2
  34. package/src/elements/EFCaptions.ts +0 -7
  35. package/src/elements/EFImage.browsertest.ts +2 -2
  36. package/src/elements/EFImage.ts +0 -4
  37. package/src/elements/EFMedia.browsertest.ts +14 -14
  38. package/src/elements/EFMedia.ts +220 -182
  39. package/src/elements/EFSourceMixin.ts +4 -4
  40. package/src/elements/EFTemporal.browsertest.ts +64 -31
  41. package/src/elements/EFTemporal.ts +99 -119
  42. package/src/elements/EFTimegroup.ts +15 -133
  43. package/src/elements/EFVideo.ts +3 -1
  44. package/src/elements/EFWaveform.ts +10 -44
  45. package/src/elements/durationConverter.ts +9 -4
  46. package/src/elements/updateAnimations.ts +88 -0
  47. package/src/gui/ContextMixin.ts +0 -3
  48. package/src/gui/EFFilmstrip.ts +7 -16
  49. package/src/gui/EFFitScale.ts +152 -0
  50. package/src/gui/EFWorkbench.ts +18 -65
  51. package/src/gui/TWMixin.ts +19 -2
  52. package/types.json +1 -1
package/dist/elements/EFMedia.js
@@ -6,11 +6,11 @@ import { property, state } from "lit/decorators.js";
  import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
  import { MP4File } from "@editframe/assets/MP4File.js";
  import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
- import { EF_RENDERING } from "../EF_RENDERING.js";
  import { EFSourceMixin } from "./EFSourceMixin.js";
- import { EFTemporal, isEFTemporal } from "./EFTemporal.js";
+ import { EFTemporal } from "./EFTemporal.js";
  import { FetchMixin } from "./FetchMixin.js";
  import { EFTargetable } from "./TargetController.js";
+ import { updateAnimations } from "./updateAnimations.js";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -70,12 +70,20 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.trackFragmentIndexLoader = new Task(this, {
  args: () => [this.fragmentIndexPath(), this.fetch],
  task: async ([fragmentIndexPath, fetch], { signal }) => {
- const response = await fetch(fragmentIndexPath, { signal });
- return await response.json();
+ try {
+ const response = await fetch(fragmentIndexPath, { signal });
+ return await response.json();
+ } catch (error) {
+ log("Failed to load track fragment index", error);
+ return void 0;
+ }
  },
  onComplete: () => {
+ this.requestUpdate("intrinsicDurationMs");
  this.requestUpdate("ownCurrentTimeMs");
+ console.log("Requesting update for durationMs", this, this.rootTimegroup);
  this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+ this.rootTimegroup?.requestUpdate("durationMs");
  }
  });
  this.initSegmentsLoader = new Task(this, {
@@ -242,8 +250,6 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  };
  }
  });
- this.fftSize = 512;
- this.fftDecay = 8;
  this.#byteTimeDomainCache = new LRUCache(100);
  this.byteTimeDomainTask = new Task(this, {
  autoRun: EF_INTERACTIVE,
@@ -251,7 +257,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.audioBufferTask.status,
  this.currentSourceTimeMs,
  this.fftSize,
- this.fftDecay
+ this.fftDecay,
+ this.fftGain,
+ this.shouldInterpolateFrequencies
  ],
  task: async () => {
  await this.audioBufferTask.taskComplete;
@@ -260,54 +268,62 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
  const audioBuffer = this.audioBufferTask.value.buffer;
- const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
- const cachedSmoothedData = this.#byteTimeDomainCache.get(smoothedKey);
- if (cachedSmoothedData) {
- return cachedSmoothedData;
- }
+ const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+ const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+ if (cachedData) return cachedData;
  const framesData = await Promise.all(
- Array.from({ length: this.fftDecay }, async (_, i) => {
- const frameOffset = i * (1e3 / 30);
+ Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+ const frameOffset = frameIndex * (1e3 / 30);
  const startTime = Math.max(
  0,
  (currentTimeMs - frameOffset - startOffsetMs) / 1e3
  );
- const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+ const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
  const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
- if (cachedFrame) {
- return cachedFrame;
- }
+ if (cachedFrame) return cachedFrame;
  const audioContext = new OfflineAudioContext(
  2,
  48e3 * (1 / 30),
  48e3
  );
+ const source = audioContext.createBufferSource();
+ source.buffer = audioBuffer;
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = this.fftSize;
- const gainNode = audioContext.createGain();
- gainNode.gain.value = 10;
- analyser.smoothingTimeConstant = 0.4;
  analyser.minDecibels = -90;
- analyser.maxDecibels = -10;
- const audioBufferSource = audioContext.createBufferSource();
- audioBufferSource.buffer = audioBuffer;
- const filter = audioContext.createBiquadFilter();
- filter.type = "bandpass";
- filter.frequency.value = 1e3;
- filter.Q.value = 0.5;
- audioBufferSource.connect(gainNode);
- gainNode.connect(filter);
- filter.connect(analyser);
+ analyser.maxDecibels = -20;
+ const gainNode = audioContext.createGain();
+ gainNode.gain.value = this.fftGain;
+ source.connect(gainNode);
+ gainNode.connect(analyser);
  analyser.connect(audioContext.destination);
- audioBufferSource.start(0, startTime, 1 / 30);
+ source.start(0, startTime, 1 / 30);
+ const dataLength = analyser.fftSize / 2;
  try {
  await audioContext.startRendering();
- const frameData = new Uint8Array(analyser.fftSize);
+ const frameData = new Uint8Array(dataLength);
  analyser.getByteTimeDomainData(frameData);
- this.#byteTimeDomainCache.set(cacheKey, frameData);
- return frameData;
+ const points = new Uint8Array(dataLength);
+ for (let i = 0; i < dataLength; i++) {
+ const pointSamples = frameData.slice(
+ i * (frameData.length / dataLength),
+ (i + 1) * (frameData.length / dataLength)
+ );
+ const rms = Math.sqrt(
+ pointSamples.reduce((sum, sample) => {
+ const normalized = (sample - 128) / 128;
+ return sum + normalized * normalized;
+ }, 0) / pointSamples.length
+ );
+ const avgSign = Math.sign(
+ pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
+ );
+ points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+ }
+ this.#byteTimeDomainCache.set(cacheKey, points);
+ return points;
  } finally {
- audioBufferSource.disconnect();
+ source.disconnect();
  analyser.disconnect();
  }
  })
@@ -319,15 +335,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  let weightSum = 0;
  framesData.forEach((frame, frameIndex) => {
  const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
- weightedSum += frame[i] * decayWeight;
+ weightedSum += (frame[i] ?? 0) * decayWeight;
  weightSum += decayWeight;
  });
  smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
  }
- this.#byteTimeDomainCache.set(
- smoothedKey,
- smoothedData.slice(0, Math.floor(smoothedData.length * 0.8))
- );
+ this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
  return smoothedData;
  }
  });
@@ -338,9 +351,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.audioBufferTask.status,
  this.currentSourceTimeMs,
  this.fftSize,
- // Add fftSize to dependency array
- this.fftDecay
- // Add fftDecay to dependency array
+ this.fftDecay,
+ this.fftGain,
+ this.shouldInterpolateFrequencies
  ],
  task: async () => {
  await this.audioBufferTask.taskComplete;
@@ -349,7 +362,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
  const audioBuffer = this.audioBufferTask.value.buffer;
- const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+ const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
  const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
  if (cachedSmoothedData) {
  return cachedSmoothedData;
@@ -361,7 +374,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  0,
  (currentTimeMs - frameOffset - startOffsetMs) / 1e3
  );
- const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+ const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
  const cachedFrame = this.#frequencyDataCache.get(cacheKey);
  if (cachedFrame) {
  return cachedFrame;
@@ -373,11 +386,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  );
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = this.fftSize;
- analyser.minDecibels = _EFMedia2.MIN_DB;
- analyser.maxDecibels = _EFMedia2.MAX_DB;
+ analyser.minDecibels = -90;
+ analyser.maxDecibels = -10;
+ const gainNode = audioContext.createGain();
+ gainNode.gain.value = this.fftGain;
+ const filter = audioContext.createBiquadFilter();
+ filter.type = "bandpass";
+ filter.frequency.value = 15e3;
+ filter.Q.value = 0.05;
  const audioBufferSource = audioContext.createBufferSource();
  audioBufferSource.buffer = audioBuffer;
- audioBufferSource.connect(analyser);
+ audioBufferSource.connect(filter);
+ filter.connect(gainNode);
+ gainNode.connect(analyser);
  analyser.connect(audioContext.destination);
  audioBufferSource.start(0, startTime, 1 / 30);
  try {
@@ -412,8 +433,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  0,
  Math.floor(smoothedData.length / 2)
  );
- this.#frequencyDataCache.set(smoothedKey, slicedData);
- return slicedData;
+ const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
+ this.#frequencyDataCache.set(smoothedKey, processedData);
+ return processedData;
  }
  });
  }
@@ -437,18 +459,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  }
  fragmentIndexPath() {
  if (this.assetId) {
- if (EF_RENDERING()) {
- return `editframe://api/v1/isobmff_files/${this.assetId}/index`;
- }
  return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
  }
  return `/@ef-track-fragment-index/${this.src ?? ""}`;
  }
  fragmentTrackPath(trackId) {
  if (this.assetId) {
- if (EF_RENDERING()) {
- return `editframe://api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
- }
  return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
  }
  return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
@@ -471,71 +487,13 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.executeSeek(this.currentSourceTimeMs);
  }
  if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
- const timelineTimeMs = (this.rootTimegroup ?? this).currentTimeMs;
- if (this.startTimeMs > timelineTimeMs || this.endTimeMs < timelineTimeMs) {
- this.style.display = "none";
- return;
- }
- this.style.display = "";
- const animations = this.getAnimations({ subtree: true });
- this.style.setProperty("--ef-duration", `${this.durationMs}ms`);
- this.style.setProperty(
- "--ef-transition-duration",
- `${this.parentTimegroup?.overlapMs ?? 0}ms`
- );
- this.style.setProperty(
- "--ef-transition-out-start",
- `${this.durationMs - (this.parentTimegroup?.overlapMs ?? 0)}ms`
- );
- for (const animation of animations) {
- if (animation.playState === "running") {
- animation.pause();
- }
- const effect = animation.effect;
- if (!(effect && effect instanceof KeyframeEffect)) {
- return;
- }
- const target = effect.target;
- if (!target) {
- return;
- }
- if (target.closest("ef-video, ef-audio") !== this) {
- return;
- }
- if (isEFTemporal(target)) {
- const timing = effect.getTiming();
- const duration = Number(timing.duration) ?? 0;
- const delay = Number(timing.delay);
- const newTime = Math.floor(
- Math.min(target.ownCurrentTimeMs, duration - 1 + delay)
- );
- if (Number.isNaN(newTime)) {
- return;
- }
- animation.currentTime = newTime;
- } else if (target) {
- const nearestTimegroup = target.closest("ef-timegroup");
- if (!nearestTimegroup) {
- return;
- }
- const timing = effect.getTiming();
- const duration = Number(timing.duration) ?? 0;
- const delay = Number(timing.delay);
- const newTime = Math.floor(
- Math.min(nearestTimegroup.ownCurrentTimeMs, duration - 1 + delay)
- );
- if (Number.isNaN(newTime)) {
- return;
- }
- animation.currentTime = newTime;
- }
- }
+ updateAnimations(this);
  }
  }
  get hasOwnDuration() {
  return true;
  }
- get durationMs() {
+ get intrinsicDurationMs() {
  if (!this.trackFragmentIndexLoader.value) {
  return 0;
  }
@@ -547,30 +505,18 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  if (durations.length === 0) {
  return 0;
  }
- if (this.sourceInMs && this.sourceOutMs && this.sourceOutMs > this.sourceInMs) {
- return Math.max(this.sourceOutMs - this.sourceInMs);
- }
- if (this.sourceInMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceInMs;
- }
- if (this.sourceOutMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs;
- }
- if (this.sourceInMs && this.sourceOutMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs - this.sourceInMs;
- }
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs;
+ return Math.max(...durations);
  }
  #audioContext;
  async fetchAudioSpanningTime(fromMs, toMs) {
  if (this.sourceInMs) {
- fromMs -= this.startTimeMs - this.trimStartMs - this.sourceInMs;
+ fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
  }
  if (this.sourceOutMs) {
- toMs -= this.startTimeMs - this.trimStartMs - this.sourceOutMs;
+ toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
  }
- fromMs -= this.startTimeMs - this.trimStartMs;
- toMs -= this.startTimeMs - this.trimStartMs;
+ fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
+ toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
  await this.trackFragmentIndexLoader.taskComplete;
  const audioTrackId = this.defaultAudioTrackId;
  if (!audioTrackId) {
@@ -624,15 +570,39 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  });
  return {
  blob: audioBlob,
- startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - this.trimStartMs,
- endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - this.trimEndMs
+ startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
+ endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
  };
  }
- static {
- this.MIN_DB = -90;
+ set fftSize(value) {
+ const oldValue = this.fftSize;
+ this.setAttribute("fft-size", String(value));
+ this.requestUpdate("fft-size", oldValue);
  }
- static {
- this.MAX_DB = -20;
+ set fftDecay(value) {
+ const oldValue = this.fftDecay;
+ this.setAttribute("fft-decay", String(value));
+ this.requestUpdate("fft-decay", oldValue);
+ }
+ get fftSize() {
+ return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+ }
+ get fftDecay() {
+ return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+ }
+ set interpolateFrequencies(value) {
+ const oldValue = this.interpolateFrequencies;
+ this.setAttribute("interpolate-frequencies", String(value));
+ this.requestUpdate("interpolate-frequencies", oldValue);
+ }
+ get interpolateFrequencies() {
+ return this.getAttribute("interpolate-frequencies") !== "false";
+ }
+ get shouldInterpolateFrequencies() {
+ if (this.hasAttribute("interpolate-frequencies")) {
+ return this.getAttribute("interpolate-frequencies") !== "false";
+ }
+ return false;
  }
  static {
  this.DECAY_WEIGHT = 0.7;
@@ -657,6 +627,14 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  }
  #byteTimeDomainCache;
  #frequencyDataCache;
+ set fftGain(value) {
+ const oldValue = this.fftGain;
+ this.setAttribute("fft-gain", String(value));
+ this.requestUpdate("fft-gain", oldValue);
+ }
+ get fftGain() {
+ return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
+ }
  };
  __decorateClass([
  property({ type: Number })
@@ -667,13 +645,53 @@ __decorateClass([
  __decorateClass([
  state()
  ], _EFMedia.prototype, "desiredSeekTimeMs", 2);
- __decorateClass([
- property({ type: Number })
- ], _EFMedia.prototype, "fftSize", 2);
- __decorateClass([
- property({ type: Number })
- ], _EFMedia.prototype, "fftDecay", 2);
  let EFMedia = _EFMedia;
+ function processFFTData(fftData, zeroThresholdPercent = 0.1) {
+ const totalBins = fftData.length;
+ const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+ let zeroCount = 0;
+ let cutoffIndex = totalBins;
+ for (let i = totalBins - 1; i >= 0; i--) {
+ if (fftData[i] < 10) {
+ zeroCount++;
+ } else {
+ if (zeroCount >= zeroThresholdCount) {
+ cutoffIndex = i + 1;
+ break;
+ }
+ }
+ }
+ if (cutoffIndex < zeroThresholdCount) {
+ return fftData;
+ }
+ const goodData = fftData.slice(0, cutoffIndex);
+ const resampledData = interpolateData(goodData, fftData.length);
+ const attenuationStartIndex = Math.floor(totalBins * 0.9);
+ for (let i = attenuationStartIndex; i < totalBins; i++) {
+ const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + 0.2;
+ const attenuationFactor = Math.max(0, 1 - attenuationProgress);
+ resampledData[i] = Math.floor(resampledData[i] * attenuationFactor);
+ }
+ return resampledData;
+ }
+ function interpolateData(data, targetSize) {
+ const resampled = new Uint8Array(targetSize);
+ const dataLength = data.length;
+ for (let i = 0; i < targetSize; i++) {
+ const ratio = i / (targetSize - 1) * (dataLength - 1);
+ const index = Math.floor(ratio);
+ const fraction = ratio - index;
+ if (index >= dataLength - 1) {
+ resampled[i] = data[dataLength - 1];
+ } else {
+ resampled[i] = Math.round(
+ // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+ data[index] * (1 - fraction) + data[index + 1] * fraction
+ );
+ }
+ }
+ return resampled;
+ }
  export {
  EFMedia,
  deepGetMediaElements
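
Note: the EFMedia.js hunks above replace the reactive fftSize/fftDecay number properties with attribute-backed accessors (fft-size, fft-decay, fft-gain, interpolate-frequencies). A minimal usage sketch under that assumption — the query, element, and values below are illustrative only, not taken from the package docs:

  // Hypothetical sketch: configure the new FFT attributes on an ef-video element.
  // Defaults read from the diff above: fft-size "128", fft-decay "8", fft-gain "3.0".
  const media = document.querySelector("ef-video");
  media?.setAttribute("fft-size", "256");
  media?.setAttribute("fft-decay", "8");
  media?.setAttribute("fft-gain", "3.0");
  // Per shouldInterpolateFrequencies above, any value other than "false" enables interpolation once the attribute is present.
  media?.setAttribute("interpolate-frequencies", "true");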
package/dist/elements/EFSourceMixin.js
@@ -25,7 +25,8 @@ function EFSourceMixin(superClass, options) {
  });
  }
  get apiHost() {
- return this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost ?? "https://editframe.dev";
+ const apiHost = this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost;
+ return apiHost || "https://editframe.dev";
  }
  productionSrc() {
  if (!this.md5SumLoader.value) {
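
Note: the apiHost change above swaps the final ?? fallback for ||, so an ancestor that supplies a present-but-empty apiHost now also falls back to the default host. A one-line illustration of the difference (values are made up):

  const fromAncestor = "";                                    // e.g. an empty apihost on an ancestor element
  const nullish = fromAncestor ?? "https://editframe.dev";    // "" — the empty string is kept
  const logicalOr = fromAncestor || "https://editframe.dev";  // the default host is used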
package/dist/elements/EFTemporal.browsertest.d.ts
@@ -1,10 +1,11 @@
  import { LitElement } from 'lit';
- declare const TestTemporal_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
- declare class TestTemporal extends TestTemporal_base {
+ declare const TenSeconds_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
+ declare class TenSeconds extends TenSeconds_base {
+ get intrinsicDurationMs(): number;
  }
  declare global {
  interface HTMLElementTagNameMap {
- "test-temporal": TestTemporal;
+ "ten-seconds": TenSeconds;
  }
  }
  export {};
package/dist/elements/EFTemporal.d.ts
@@ -10,6 +10,7 @@ export declare class TemporalMixinInterface {
  * Whether the element has a duration set as an attribute.
  */
  get hasExplicitDuration(): boolean;
+ get sourceStartMs(): number;
  /**
  * Used to trim the start of the media.
  *
@@ -19,7 +20,7 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "trimstart"
  */
- get trimStartMs(): number;
+ get trimStartMs(): number | undefined;
  /**
  * Used to trim the end of the media.
  *
@@ -30,10 +31,10 @@ export declare class TemporalMixinInterface {
  * @domAttribute "trimend"
  */
  get trimEndMs(): number;
- set trimStartMs(value: number);
- set trimEndMs(value: number);
- set trimstart(value: string);
- set trimend(value: string);
+ set trimStartMs(value: number | undefined);
+ set trimEndMs(value: number | undefined);
+ set trimstart(value: string | undefined);
+ set trimend(value: string | undefined);
  /**
  * The source in time of the element.
  *
@@ -53,7 +54,7 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "sourcein"
  */
- get sourceInMs(): number;
+ get sourceInMs(): number | undefined;
  /**
  * The source out time of the element.
  *
@@ -75,15 +76,17 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "sourceout"
  */
- get sourceOutMs(): number;
- set sourceInMs(value: number);
- set sourceOutMs(value: number);
- set sourcein(value: string);
- set sourceout(value: string);
+ get sourceOutMs(): number | undefined;
+ set sourceInMs(value: number | undefined);
+ set sourceOutMs(value: number | undefined);
+ set sourcein(value: string | undefined);
+ set sourceout(value: string | undefined);
  /**
  * @domAttribute "duration"
  */
  get durationMs(): number;
+ get explicitDurationMs(): number | undefined;
+ get intrinsicDurationMs(): number | undefined;
  /**
  * The start time of the element within its root timegroup in milliseconds.
  *