@editframe/elements 0.15.0-beta.9 → 0.16.0-beta.1

This diff compares the published contents of two publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (52)
  1. package/dist/EF_FRAMEGEN.d.ts +14 -10
  2. package/dist/EF_FRAMEGEN.js +17 -28
  3. package/dist/elements/EFCaptions.js +0 -7
  4. package/dist/elements/EFImage.js +0 -4
  5. package/dist/elements/EFMedia.d.ts +13 -8
  6. package/dist/elements/EFMedia.js +163 -146
  7. package/dist/elements/EFSourceMixin.js +2 -1
  8. package/dist/elements/EFTemporal.browsertest.d.ts +4 -3
  9. package/dist/elements/EFTemporal.d.ts +14 -11
  10. package/dist/elements/EFTemporal.js +63 -87
  11. package/dist/elements/EFTimegroup.d.ts +2 -4
  12. package/dist/elements/EFTimegroup.js +15 -103
  13. package/dist/elements/EFVideo.js +3 -1
  14. package/dist/elements/EFWaveform.d.ts +1 -1
  15. package/dist/elements/EFWaveform.js +11 -28
  16. package/dist/elements/durationConverter.d.ts +8 -8
  17. package/dist/elements/durationConverter.js +2 -2
  18. package/dist/elements/updateAnimations.d.ts +9 -0
  19. package/dist/elements/updateAnimations.js +62 -0
  20. package/dist/getRenderInfo.d.ts +51 -0
  21. package/dist/getRenderInfo.js +72 -0
  22. package/dist/gui/EFFilmstrip.js +7 -16
  23. package/dist/gui/EFFitScale.d.ts +27 -0
  24. package/dist/gui/EFFitScale.js +138 -0
  25. package/dist/gui/EFWorkbench.d.ts +2 -5
  26. package/dist/gui/EFWorkbench.js +11 -56
  27. package/dist/gui/TWMixin.css.js +1 -1
  28. package/dist/gui/TWMixin.js +14 -2
  29. package/dist/index.d.ts +2 -0
  30. package/dist/index.js +6 -1
  31. package/dist/style.css +3 -3
  32. package/package.json +4 -3
  33. package/src/elements/EFCaptions.browsertest.ts +2 -2
  34. package/src/elements/EFCaptions.ts +0 -7
  35. package/src/elements/EFImage.browsertest.ts +2 -2
  36. package/src/elements/EFImage.ts +0 -4
  37. package/src/elements/EFMedia.browsertest.ts +14 -14
  38. package/src/elements/EFMedia.ts +219 -182
  39. package/src/elements/EFSourceMixin.ts +4 -4
  40. package/src/elements/EFTemporal.browsertest.ts +64 -31
  41. package/src/elements/EFTemporal.ts +99 -119
  42. package/src/elements/EFTimegroup.ts +15 -133
  43. package/src/elements/EFVideo.ts +3 -1
  44. package/src/elements/EFWaveform.ts +10 -44
  45. package/src/elements/durationConverter.ts +9 -4
  46. package/src/elements/updateAnimations.ts +88 -0
  47. package/src/gui/ContextMixin.ts +0 -3
  48. package/src/gui/EFFilmstrip.ts +7 -16
  49. package/src/gui/EFFitScale.ts +152 -0
  50. package/src/gui/EFWorkbench.ts +16 -65
  51. package/src/gui/TWMixin.ts +19 -2
  52. package/types.json +1 -1
@@ -6,11 +6,11 @@ import { property, state } from "lit/decorators.js";
  import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
  import { MP4File } from "@editframe/assets/MP4File.js";
  import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
- import { EF_RENDERING } from "../EF_RENDERING.js";
  import { EFSourceMixin } from "./EFSourceMixin.js";
- import { EFTemporal, isEFTemporal } from "./EFTemporal.js";
+ import { EFTemporal } from "./EFTemporal.js";
  import { FetchMixin } from "./FetchMixin.js";
  import { EFTargetable } from "./TargetController.js";
+ import { updateAnimations } from "./updateAnimations.js";
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __decorateClass = (decorators, target, key, kind) => {
@@ -70,12 +70,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.trackFragmentIndexLoader = new Task(this, {
  args: () => [this.fragmentIndexPath(), this.fetch],
  task: async ([fragmentIndexPath, fetch], { signal }) => {
- const response = await fetch(fragmentIndexPath, { signal });
- return await response.json();
+ try {
+ const response = await fetch(fragmentIndexPath, { signal });
+ return await response.json();
+ } catch (error) {
+ log("Failed to load track fragment index", error);
+ return void 0;
+ }
  },
  onComplete: () => {
+ this.requestUpdate("intrinsicDurationMs");
  this.requestUpdate("ownCurrentTimeMs");
  this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+ this.rootTimegroup?.requestUpdate("durationMs");
  }
  });
  this.initSegmentsLoader = new Task(this, {
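The loader change above makes a failed fragment-index request resolve to undefined (after logging) instead of leaving the Task in an error state, and onComplete now also invalidates intrinsicDurationMs and the root timegroup's durationMs. A minimal standalone sketch of the same graceful-failure pattern, using console.warn in place of the package's internal log helper and a hypothetical createIndexLoader helper:

    import { Task } from "@lit/task";
    import type { ReactiveControllerHost } from "lit";

    // Hypothetical index shape; the real structure comes from the package's MP4 tooling.
    type FragmentIndex = Record<string, unknown>;

    function createIndexLoader(host: ReactiveControllerHost, indexPath: () => string) {
      return new Task(host, {
        args: () => [indexPath()],
        task: async ([path], { signal }): Promise<FragmentIndex | undefined> => {
          try {
            const response = await fetch(path, { signal });
            return (await response.json()) as FragmentIndex;
          } catch (error) {
            // Degrade gracefully: dependents render without the index instead of erroring.
            console.warn("Failed to load track fragment index", error);
            return undefined;
          }
        },
      });
    }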
@@ -242,8 +249,6 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  };
  }
  });
- this.fftSize = 512;
- this.fftDecay = 8;
  this.#byteTimeDomainCache = new LRUCache(100);
  this.byteTimeDomainTask = new Task(this, {
  autoRun: EF_INTERACTIVE,
@@ -251,7 +256,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.audioBufferTask.status,
  this.currentSourceTimeMs,
  this.fftSize,
- this.fftDecay
+ this.fftDecay,
+ this.fftGain,
+ this.shouldInterpolateFrequencies
  ],
  task: async () => {
  await this.audioBufferTask.taskComplete;
@@ -260,54 +267,62 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
  const audioBuffer = this.audioBufferTask.value.buffer;
- const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
- const cachedSmoothedData = this.#byteTimeDomainCache.get(smoothedKey);
- if (cachedSmoothedData) {
- return cachedSmoothedData;
- }
+ const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+ const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+ if (cachedData) return cachedData;
  const framesData = await Promise.all(
- Array.from({ length: this.fftDecay }, async (_, i) => {
- const frameOffset = i * (1e3 / 30);
+ Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+ const frameOffset = frameIndex * (1e3 / 30);
  const startTime = Math.max(
  0,
  (currentTimeMs - frameOffset - startOffsetMs) / 1e3
  );
- const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+ const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
  const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
- if (cachedFrame) {
- return cachedFrame;
- }
+ if (cachedFrame) return cachedFrame;
  const audioContext = new OfflineAudioContext(
  2,
  48e3 * (1 / 30),
  48e3
  );
+ const source = audioContext.createBufferSource();
+ source.buffer = audioBuffer;
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = this.fftSize;
- const gainNode = audioContext.createGain();
- gainNode.gain.value = 10;
- analyser.smoothingTimeConstant = 0.4;
  analyser.minDecibels = -90;
- analyser.maxDecibels = -10;
- const audioBufferSource = audioContext.createBufferSource();
- audioBufferSource.buffer = audioBuffer;
- const filter = audioContext.createBiquadFilter();
- filter.type = "bandpass";
- filter.frequency.value = 1e3;
- filter.Q.value = 0.5;
- audioBufferSource.connect(gainNode);
- gainNode.connect(filter);
- filter.connect(analyser);
+ analyser.maxDecibels = -20;
+ const gainNode = audioContext.createGain();
+ gainNode.gain.value = this.fftGain;
+ source.connect(gainNode);
+ gainNode.connect(analyser);
  analyser.connect(audioContext.destination);
- audioBufferSource.start(0, startTime, 1 / 30);
+ source.start(0, startTime, 1 / 30);
+ const dataLength = analyser.fftSize / 2;
  try {
  await audioContext.startRendering();
- const frameData = new Uint8Array(analyser.fftSize);
+ const frameData = new Uint8Array(dataLength);
  analyser.getByteTimeDomainData(frameData);
- this.#byteTimeDomainCache.set(cacheKey, frameData);
- return frameData;
+ const points = new Uint8Array(dataLength);
+ for (let i = 0; i < dataLength; i++) {
+ const pointSamples = frameData.slice(
+ i * (frameData.length / dataLength),
+ (i + 1) * (frameData.length / dataLength)
+ );
+ const rms = Math.sqrt(
+ pointSamples.reduce((sum, sample) => {
+ const normalized = (sample - 128) / 128;
+ return sum + normalized * normalized;
+ }, 0) / pointSamples.length
+ );
+ const avgSign = Math.sign(
+ pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
+ );
+ points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+ }
+ this.#byteTimeDomainCache.set(cacheKey, points);
+ return points;
  } finally {
- audioBufferSource.disconnect();
+ source.disconnect();
  analyser.disconnect();
  }
  })
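The rewritten byteTimeDomainTask no longer caches the raw analyser output: each 1/30 s render is reduced to analyser.fftSize / 2 points, where each point is the RMS of its sample window mapped back around the 128 midpoint and signed by the window's average deviation. A self-contained sketch of that reduction (function and parameter names are illustrative, not the package's API):

    // Collapse an unsigned 8-bit time-domain buffer (midpoint 128) into `pointCount`
    // signed RMS points. Assumes frame.length is a multiple of pointCount.
    function downsampleTimeDomain(frame: Uint8Array, pointCount: number): Uint8Array {
      const points = new Uint8Array(pointCount);
      const windowSize = frame.length / pointCount;
      for (let i = 0; i < pointCount; i++) {
        const window = frame.slice(i * windowSize, (i + 1) * windowSize);
        let sumSquares = 0;
        let sumDeviation = 0;
        for (const sample of window) {
          const normalized = (sample - 128) / 128; // map 0..255 onto -1..1
          sumSquares += normalized * normalized;
          sumDeviation += sample - 128;
        }
        const rms = Math.sqrt(sumSquares / window.length);
        // Keep the dominant polarity of the window so the waveform still crosses the midline.
        points[i] = Math.min(255, Math.round(128 + Math.sign(sumDeviation) * rms * 128));
      }
      return points;
    }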
@@ -319,15 +334,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  let weightSum = 0;
  framesData.forEach((frame, frameIndex) => {
  const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
- weightedSum += frame[i] * decayWeight;
+ weightedSum += (frame[i] ?? 0) * decayWeight;
  weightSum += decayWeight;
  });
  smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
  }
- this.#byteTimeDomainCache.set(
- smoothedKey,
- smoothedData.slice(0, Math.floor(smoothedData.length * 0.8))
- );
+ this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
  return smoothedData;
  }
  });
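The smoothing pass above is an exponentially weighted average over the last fftDecay frames: smoothed[i] = sum of frame_k[i] * w^k divided by the sum of w^k, with w = DECAY_WEIGHT = 0.7, and the new `?? 0` guard keeps a short frame from injecting NaN. The same weighting in isolation (illustrative helper, not exported by the package):

    // Blend recent frames so newer data dominates but older frames still damp flicker.
    function decaySmooth(frames: Uint8Array[], decayWeight = 0.7): Uint8Array {
      const length = frames[0]?.length ?? 0;
      const smoothed = new Uint8Array(length);
      for (let i = 0; i < length; i++) {
        let weightedSum = 0;
        let weightSum = 0;
        frames.forEach((frame, frameIndex) => {
          const weight = decayWeight ** frameIndex; // frame 0 is the newest
          weightedSum += (frame[i] ?? 0) * weight;
          weightSum += weight;
        });
        smoothed[i] = Math.min(255, Math.round(weightedSum / weightSum));
      }
      return smoothed;
    }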
@@ -338,9 +350,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.audioBufferTask.status,
  this.currentSourceTimeMs,
  this.fftSize,
- // Add fftSize to dependency array
- this.fftDecay
- // Add fftDecay to dependency array
+ this.fftDecay,
+ this.fftGain,
+ this.shouldInterpolateFrequencies
  ],
  task: async () => {
  await this.audioBufferTask.taskComplete;
@@ -349,7 +361,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
  const audioBuffer = this.audioBufferTask.value.buffer;
- const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+ const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
  const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
  if (cachedSmoothedData) {
  return cachedSmoothedData;
@@ -361,7 +373,7 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  0,
  (currentTimeMs - frameOffset - startOffsetMs) / 1e3
  );
- const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
+ const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
  const cachedFrame = this.#frequencyDataCache.get(cacheKey);
  if (cachedFrame) {
  return cachedFrame;
@@ -373,11 +385,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  );
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = this.fftSize;
- analyser.minDecibels = _EFMedia2.MIN_DB;
- analyser.maxDecibels = _EFMedia2.MAX_DB;
+ analyser.minDecibels = -90;
+ analyser.maxDecibels = -10;
+ const gainNode = audioContext.createGain();
+ gainNode.gain.value = this.fftGain;
+ const filter = audioContext.createBiquadFilter();
+ filter.type = "bandpass";
+ filter.frequency.value = 15e3;
+ filter.Q.value = 0.05;
  const audioBufferSource = audioContext.createBufferSource();
  audioBufferSource.buffer = audioBuffer;
- audioBufferSource.connect(analyser);
+ audioBufferSource.connect(filter);
+ filter.connect(gainNode);
+ gainNode.connect(analyser);
  analyser.connect(audioContext.destination);
  audioBufferSource.start(0, startTime, 1 / 30);
  try {
@@ -412,8 +432,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  0,
  Math.floor(smoothedData.length / 2)
  );
- this.#frequencyDataCache.set(smoothedKey, slicedData);
- return slicedData;
+ const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
+ this.#frequencyDataCache.set(smoothedKey, processedData);
+ return processedData;
  }
  });
  }
@@ -437,18 +458,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  }
  fragmentIndexPath() {
  if (this.assetId) {
- if (EF_RENDERING()) {
- return `editframe://api/v1/isobmff_files/${this.assetId}/index`;
- }
  return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
  }
  return `/@ef-track-fragment-index/${this.src ?? ""}`;
  }
  fragmentTrackPath(trackId) {
  if (this.assetId) {
- if (EF_RENDERING()) {
- return `editframe://api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
- }
  return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
  }
  return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
@@ -471,71 +486,13 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  this.executeSeek(this.currentSourceTimeMs);
  }
  if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
- const timelineTimeMs = (this.rootTimegroup ?? this).currentTimeMs;
- if (this.startTimeMs > timelineTimeMs || this.endTimeMs < timelineTimeMs) {
- this.style.display = "none";
- return;
- }
- this.style.display = "";
- const animations = this.getAnimations({ subtree: true });
- this.style.setProperty("--ef-duration", `${this.durationMs}ms`);
- this.style.setProperty(
- "--ef-transition-duration",
- `${this.parentTimegroup?.overlapMs ?? 0}ms`
- );
- this.style.setProperty(
- "--ef-transition-out-start",
- `${this.durationMs - (this.parentTimegroup?.overlapMs ?? 0)}ms`
- );
- for (const animation of animations) {
- if (animation.playState === "running") {
- animation.pause();
- }
- const effect = animation.effect;
- if (!(effect && effect instanceof KeyframeEffect)) {
- return;
- }
- const target = effect.target;
- if (!target) {
- return;
- }
- if (target.closest("ef-video, ef-audio") !== this) {
- return;
- }
- if (isEFTemporal(target)) {
- const timing = effect.getTiming();
- const duration = Number(timing.duration) ?? 0;
- const delay = Number(timing.delay);
- const newTime = Math.floor(
- Math.min(target.ownCurrentTimeMs, duration - 1 + delay)
- );
- if (Number.isNaN(newTime)) {
- return;
- }
- animation.currentTime = newTime;
- } else if (target) {
- const nearestTimegroup = target.closest("ef-timegroup");
- if (!nearestTimegroup) {
- return;
- }
- const timing = effect.getTiming();
- const duration = Number(timing.duration) ?? 0;
- const delay = Number(timing.delay);
- const newTime = Math.floor(
- Math.min(nearestTimegroup.ownCurrentTimeMs, duration - 1 + delay)
- );
- if (Number.isNaN(newTime)) {
- return;
- }
- animation.currentTime = newTime;
- }
- }
+ updateAnimations(this);
  }
  }
  get hasOwnDuration() {
  return true;
  }
- get durationMs() {
+ get intrinsicDurationMs() {
  if (!this.trackFragmentIndexLoader.value) {
  return 0;
  }
@@ -547,30 +504,18 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  if (durations.length === 0) {
  return 0;
  }
- if (this.sourceInMs && this.sourceOutMs && this.sourceOutMs > this.sourceInMs) {
- return Math.max(this.sourceOutMs - this.sourceInMs);
- }
- if (this.sourceInMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceInMs;
- }
- if (this.sourceOutMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs;
- }
- if (this.sourceInMs && this.sourceOutMs) {
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs - this.sourceOutMs - this.sourceInMs;
- }
- return Math.max(...durations) - this.trimStartMs - this.trimEndMs;
+ return Math.max(...durations);
  }
  #audioContext;
  async fetchAudioSpanningTime(fromMs, toMs) {
  if (this.sourceInMs) {
- fromMs -= this.startTimeMs - this.trimStartMs - this.sourceInMs;
+ fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
  }
  if (this.sourceOutMs) {
- toMs -= this.startTimeMs - this.trimStartMs - this.sourceOutMs;
+ toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
  }
- fromMs -= this.startTimeMs - this.trimStartMs;
- toMs -= this.startTimeMs - this.trimStartMs;
+ fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
+ toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
  await this.trackFragmentIndexLoader.taskComplete;
  const audioTrackId = this.defaultAudioTrackId;
  if (!audioTrackId) {
@@ -624,15 +569,39 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  });
  return {
  blob: audioBlob,
- startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - this.trimStartMs,
- endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - this.trimEndMs
+ startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
+ endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
  };
  }
- static {
- this.MIN_DB = -90;
+ set fftSize(value) {
+ const oldValue = this.fftSize;
+ this.setAttribute("fft-size", String(value));
+ this.requestUpdate("fft-size", oldValue);
  }
- static {
- this.MAX_DB = -20;
+ set fftDecay(value) {
+ const oldValue = this.fftDecay;
+ this.setAttribute("fft-decay", String(value));
+ this.requestUpdate("fft-decay", oldValue);
+ }
+ get fftSize() {
+ return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+ }
+ get fftDecay() {
+ return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+ }
+ set interpolateFrequencies(value) {
+ const oldValue = this.interpolateFrequencies;
+ this.setAttribute("interpolate-frequencies", String(value));
+ this.requestUpdate("interpolate-frequencies", oldValue);
+ }
+ get interpolateFrequencies() {
+ return this.getAttribute("interpolate-frequencies") !== "false";
+ }
+ get shouldInterpolateFrequencies() {
+ if (this.hasAttribute("interpolate-frequencies")) {
+ return this.getAttribute("interpolate-frequencies") !== "false";
+ }
+ return false;
  }
  static {
  this.DECAY_WEIGHT = 0.7;
@@ -657,6 +626,14 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
  }
  #byteTimeDomainCache;
  #frequencyDataCache;
+ set fftGain(value) {
+ const oldValue = this.fftGain;
+ this.setAttribute("fft-gain", String(value));
+ this.requestUpdate("fft-gain", oldValue);
+ }
+ get fftGain() {
+ return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
+ }
  };
  __decorateClass([
  property({ type: Number })
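Taken together with the fftSize/fftDecay/interpolateFrequencies accessors earlier in this file, the analyser tuning is now attribute-backed rather than decorated class fields: fft-size defaults to 128, fft-decay to 8, fft-gain to 3.0, and frequency interpolation only applies when interpolate-frequencies is present and not "false". A hypothetical usage sketch (the ef-audio tag is taken from the selectors in this diff; the surrounding script is illustrative):

    // Setting the attributes directly has the same effect as the fftSize/fftDecay/fftGain
    // property setters shown above, since those setters just mirror the DOM attributes.
    const media = document.querySelector("ef-audio");
    if (media) {
      media.setAttribute("fft-size", "256");
      media.setAttribute("fft-decay", "6");
      media.setAttribute("fft-gain", "2.5");
      media.setAttribute("interpolate-frequencies", "true"); // opt in; absent means off
    }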
@@ -667,13 +644,53 @@ __decorateClass([
  __decorateClass([
  state()
  ], _EFMedia.prototype, "desiredSeekTimeMs", 2);
- __decorateClass([
- property({ type: Number })
- ], _EFMedia.prototype, "fftSize", 2);
- __decorateClass([
- property({ type: Number })
- ], _EFMedia.prototype, "fftDecay", 2);
  let EFMedia = _EFMedia;
+ function processFFTData(fftData, zeroThresholdPercent = 0.1) {
+ const totalBins = fftData.length;
+ const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+ let zeroCount = 0;
+ let cutoffIndex = totalBins;
+ for (let i = totalBins - 1; i >= 0; i--) {
+ if (fftData[i] < 10) {
+ zeroCount++;
+ } else {
+ if (zeroCount >= zeroThresholdCount) {
+ cutoffIndex = i + 1;
+ break;
+ }
+ }
+ }
+ if (cutoffIndex < zeroThresholdCount) {
+ return fftData;
+ }
+ const goodData = fftData.slice(0, cutoffIndex);
+ const resampledData = interpolateData(goodData, fftData.length);
+ const attenuationStartIndex = Math.floor(totalBins * 0.9);
+ for (let i = attenuationStartIndex; i < totalBins; i++) {
+ const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + 0.2;
+ const attenuationFactor = Math.max(0, 1 - attenuationProgress);
+ resampledData[i] = Math.floor(resampledData[i] * attenuationFactor);
+ }
+ return resampledData;
+ }
+ function interpolateData(data, targetSize) {
+ const resampled = new Uint8Array(targetSize);
+ const dataLength = data.length;
+ for (let i = 0; i < targetSize; i++) {
+ const ratio = i / (targetSize - 1) * (dataLength - 1);
+ const index = Math.floor(ratio);
+ const fraction = ratio - index;
+ if (index >= dataLength - 1) {
+ resampled[i] = data[dataLength - 1];
+ } else {
+ resampled[i] = Math.round(
+ // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+ data[index] * (1 - fraction) + data[index + 1] * fraction
+ );
+ }
+ }
+ return resampled;
+ }
  export {
  EFMedia,
  deepGetMediaElements
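The two module-level helpers added above work together: processFFTData trims the trailing run of near-silent bins (values under 10) off the top of the spectrum, stretches the remaining bins back to the original length with interpolateData, then fades the top 10% of bins toward zero. A worked sketch of the linear resampling step on a toy array (standalone illustration, not the package's export):

    // Linear resampling: output index i maps to input position i / (target - 1) * (source - 1).
    function resampleLinear(data: number[], targetSize: number): number[] {
      const out = new Array<number>(targetSize);
      for (let i = 0; i < targetSize; i++) {
        const pos = (i / (targetSize - 1)) * (data.length - 1);
        const index = Math.floor(pos);
        const fraction = pos - index;
        out[i] =
          index >= data.length - 1
            ? data[data.length - 1]
            : Math.round(data[index] * (1 - fraction) + data[index + 1] * fraction);
      }
      return out;
    }

    console.log(resampleLinear([0, 128, 255], 5)); // [0, 64, 128, 192, 255]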
@@ -25,7 +25,8 @@ function EFSourceMixin(superClass, options) {
  });
  }
  get apiHost() {
- return this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost ?? "https://editframe.dev";
+ const apiHost = this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost;
+ return apiHost || "https://editframe.dev";
  }
  productionSrc() {
  if (!this.md5SumLoader.value) {
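The apiHost change swaps the final nullish fallback for a logical OR so that an ancestor which resolves apiHost to an empty string also falls through to the default, not just one that resolves to null or undefined:

    const configured = ""; // e.g. an ancestor whose apiHost resolves to an empty string
    const viaNullish = configured ?? "https://editframe.dev"; // "" — keeps the empty string
    const viaLogicalOr = configured || "https://editframe.dev"; // "https://editframe.dev"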
@@ -1,10 +1,11 @@
  import { LitElement } from 'lit';
- declare const TestTemporal_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
- declare class TestTemporal extends TestTemporal_base {
+ declare const TenSeconds_base: (new (...args: any[]) => import('./EFTemporal.js').TemporalMixinInterface) & typeof LitElement;
+ declare class TenSeconds extends TenSeconds_base {
+ get intrinsicDurationMs(): number;
  }
  declare global {
  interface HTMLElementTagNameMap {
- "test-temporal": TestTemporal;
+ "ten-seconds": TenSeconds;
  }
  }
  export {};
@@ -10,6 +10,7 @@ export declare class TemporalMixinInterface {
  * Whether the element has a duration set as an attribute.
  */
  get hasExplicitDuration(): boolean;
+ get sourceStartMs(): number;
  /**
  * Used to trim the start of the media.
  *
@@ -19,7 +20,7 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "trimstart"
  */
- get trimStartMs(): number;
+ get trimStartMs(): number | undefined;
  /**
  * Used to trim the end of the media.
  *
@@ -30,10 +31,10 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "trimend"
  */
  get trimEndMs(): number;
- set trimStartMs(value: number);
- set trimEndMs(value: number);
- set trimstart(value: string);
- set trimend(value: string);
+ set trimStartMs(value: number | undefined);
+ set trimEndMs(value: number | undefined);
+ set trimstart(value: string | undefined);
+ set trimend(value: string | undefined);
  /**
  * The source in time of the element.
@@ -53,7 +54,7 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "sourcein"
  */
- get sourceInMs(): number;
+ get sourceInMs(): number | undefined;
  /**
  * The source out time of the element.
  *
@@ -75,15 +76,17 @@ export declare class TemporalMixinInterface {
  *
  * @domAttribute "sourceout"
  */
- get sourceOutMs(): number;
- set sourceInMs(value: number);
- set sourceOutMs(value: number);
- set sourcein(value: string);
- set sourceout(value: string);
+ get sourceOutMs(): number | undefined;
+ set sourceInMs(value: number | undefined);
+ set sourceOutMs(value: number | undefined);
+ set sourcein(value: string | undefined);
+ set sourceout(value: string | undefined);
  /**
  * @domAttribute "duration"
  */
  get durationMs(): number;
+ get explicitDurationMs(): number | undefined;
+ get intrinsicDurationMs(): number | undefined;
  /**
  * The start time of the element within its root timegroup in milliseconds.
  *
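With trimStartMs, sourceInMs, and sourceOutMs now typed as possibly undefined on TemporalMixinInterface, callers that do arithmetic on them need a nullish fallback — the same `?? 0` guards the compiled EFMedia code above adopts. A consumer-side sketch (local helper type for illustration, not the package's interface):

    type TemporalLike = { trimStartMs?: number; sourceInMs?: number };

    // Treat unset trim/source attributes as zero before doing time math.
    function sourceOffsetMs(el: TemporalLike): number {
      return (el.trimStartMs ?? 0) + (el.sourceInMs ?? 0);
    }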