@editframe/elements 0.15.0-beta.12 → 0.15.0-beta.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -64,10 +64,10 @@ export declare class EFMedia extends EFMedia_base {
     startMs: number;
     endMs: number;
   } | undefined>;
-  fftSize: number;
-  fftDecay: number;
-  private static readonly MIN_DB;
-  private static readonly MAX_DB;
+  set fftSize(value: number);
+  set fftDecay(value: number);
+  get fftSize(): number;
+  get fftDecay(): number;
   private static readonly DECAY_WEIGHT;
   get FREQ_WEIGHTS(): Float32Array;
   byteTimeDomainTask: Task<readonly [import('@lit/task').TaskStatus, number, number, number], Uint8Array | null>;
@@ -247,8 +247,6 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        };
      }
    });
-   this.fftSize = 512;
-   this.fftDecay = 8;
    this.#byteTimeDomainCache = new LRUCache(100);
    this.byteTimeDomainTask = new Task(this, {
      autoRun: EF_INTERACTIVE,
@@ -266,53 +264,61 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
      const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
      const audioBuffer = this.audioBufferTask.value.buffer;
      const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
-     const cachedSmoothedData = this.#byteTimeDomainCache.get(smoothedKey);
-     if (cachedSmoothedData) {
-       return cachedSmoothedData;
-     }
+     const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+     if (cachedData) return cachedData;
      const framesData = await Promise.all(
-       Array.from({ length: this.fftDecay }, async (_, i) => {
-         const frameOffset = i * (1e3 / 30);
+       Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+         const frameOffset = frameIndex * (1e3 / 30);
          const startTime = Math.max(
            0,
            (currentTimeMs - frameOffset - startOffsetMs) / 1e3
          );
          const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
          const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
-         if (cachedFrame) {
-           return cachedFrame;
-         }
+         if (cachedFrame) return cachedFrame;
          const audioContext = new OfflineAudioContext(
            2,
            48e3 * (1 / 30),
            48e3
          );
+         const source = audioContext.createBufferSource();
+         source.buffer = audioBuffer;
          const analyser = audioContext.createAnalyser();
          analyser.fftSize = this.fftSize;
+         analyser.minDecibels = -90;
+         analyser.maxDecibels = -20;
          const gainNode = audioContext.createGain();
          gainNode.gain.value = 10;
-         analyser.smoothingTimeConstant = 0.4;
-         analyser.minDecibels = -90;
-         analyser.maxDecibels = -10;
-         const audioBufferSource = audioContext.createBufferSource();
-         audioBufferSource.buffer = audioBuffer;
-         const filter = audioContext.createBiquadFilter();
-         filter.type = "bandpass";
-         filter.frequency.value = 1e3;
-         filter.Q.value = 0.5;
-         audioBufferSource.connect(gainNode);
-         gainNode.connect(filter);
-         filter.connect(analyser);
+         source.connect(gainNode);
+         gainNode.connect(analyser);
          analyser.connect(audioContext.destination);
-         audioBufferSource.start(0, startTime, 1 / 30);
+         source.start(0, startTime, 1 / 30);
+         const dataLength = analyser.fftSize / 2;
          try {
            await audioContext.startRendering();
-           const frameData = new Uint8Array(analyser.fftSize);
+           const frameData = new Uint8Array(dataLength);
            analyser.getByteTimeDomainData(frameData);
-           this.#byteTimeDomainCache.set(cacheKey, frameData);
-           return frameData;
+           const points = new Uint8Array(dataLength);
+           for (let i = 0; i < dataLength; i++) {
+             const pointSamples = frameData.slice(
+               i * (frameData.length / dataLength),
+               (i + 1) * (frameData.length / dataLength)
+             );
+             const rms = Math.sqrt(
+               pointSamples.reduce((sum, sample) => {
+                 const normalized = (sample - 128) / 128;
+                 return sum + normalized * normalized;
+               }, 0) / pointSamples.length
+             );
+             const avgSign = Math.sign(
+               pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
+             );
+             points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+           }
+           this.#byteTimeDomainCache.set(cacheKey, points);
+           return points;
          } finally {
-           audioBufferSource.disconnect();
+           source.disconnect();
            analyser.disconnect();
          }
        })
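
Note: the byteTimeDomainTask above no longer caches the raw `getByteTimeDomainData` output per frame. Each rendered frame is reduced to `fftSize / 2` points, where every point is the RMS of its bucket of samples with the bucket's dominant sign preserved around the 128 midpoint. A minimal standalone sketch of that reduction (the `reduceTimeDomain` helper name is ours, not part of the package):

```ts
// Sketch of the RMS-with-sign reduction used per analysed frame above.
// `reduceTimeDomain` is a hypothetical helper name; the package inlines this logic.
function reduceTimeDomain(frameData: Uint8Array, dataLength: number): Uint8Array {
  const points = new Uint8Array(dataLength);
  const bucketSize = frameData.length / dataLength;
  for (let i = 0; i < dataLength; i++) {
    const bucket = frameData.slice(i * bucketSize, (i + 1) * bucketSize);
    // RMS of the bucket, with byte samples normalized around the 128 midpoint
    const rms = Math.sqrt(
      bucket.reduce((sum, sample) => {
        const normalized = (sample - 128) / 128;
        return sum + normalized * normalized;
      }, 0) / bucket.length
    );
    // The dominant sign decides whether the point lands above or below 128
    const avgSign = Math.sign(bucket.reduce((sum, sample) => sum + (sample - 128), 0));
    points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
  }
  return points;
}
```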
@@ -324,15 +330,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        let weightSum = 0;
        framesData.forEach((frame, frameIndex) => {
          const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
-         weightedSum += frame[i] * decayWeight;
+         weightedSum += (frame[i] ?? 0) * decayWeight;
          weightSum += decayWeight;
        });
        smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
      }
-     this.#byteTimeDomainCache.set(
-       smoothedKey,
-       smoothedData.slice(0, Math.floor(smoothedData.length * 0.8))
-     );
+     this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
      return smoothedData;
    }
  });
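
Note: the per-frame results are blended with exponential decay weighting (DECAY_WEIGHT = 0.7 over fftDecay frames), and the blended array is now cached whole instead of being truncated to 80% of its length. A rough sketch (not from the package) of how much each frame contributes with the default fftDecay of 8:

```ts
// Relative contribution of each analysed frame with fftDecay = 8 and DECAY_WEIGHT = 0.7.
// Weight k is 0.7 ** k, normalized by the weight sum: the newest frame carries
// roughly a third of the result, the oldest only a few percent.
const DECAY_WEIGHT = 0.7;
const weights = Array.from({ length: 8 }, (_, k) => DECAY_WEIGHT ** k);
const weightSum = weights.reduce((a, b) => a + b, 0);
const contributions = weights.map((w) => w / weightSum);
```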
@@ -378,11 +381,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
      );
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = this.fftSize;
-     analyser.minDecibels = _EFMedia2.MIN_DB;
-     analyser.maxDecibels = _EFMedia2.MAX_DB;
+     analyser.minDecibels = -90;
+     analyser.maxDecibels = -10;
+     const gainNode = audioContext.createGain();
+     gainNode.gain.value = 5;
+     const filter = audioContext.createBiquadFilter();
+     filter.type = "bandpass";
+     filter.frequency.value = 15e3;
+     filter.Q.value = 0.05;
      const audioBufferSource = audioContext.createBufferSource();
      audioBufferSource.buffer = audioBuffer;
-     audioBufferSource.connect(analyser);
+     audioBufferSource.connect(gainNode);
+     gainNode.connect(filter);
+     filter.connect(analyser);
      analyser.connect(audioContext.destination);
      audioBufferSource.start(0, startTime, 1 / 30);
      try {
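
Note: the frequency-analysis path above inlines the -90/-10 dB analyser range and now routes the buffer source through a gain node (gain 5) and a very wide bandpass filter (centre 15 kHz, Q 0.05) before the analyser, instead of connecting the source directly. A sketch of the equivalent Web Audio graph in isolation (the helper name, buffer, and fftSize are placeholders, not package API):

```ts
// Sketch of the new analysis graph: source -> gain -> bandpass -> analyser -> destination.
// Values copied from the diff; with Q = 0.05 the bandpass is extremely wide, so it acts
// more like a gentle spectral tilt than a narrow 15 kHz band.
function buildAnalyserChain(audioBuffer: AudioBuffer, fftSize: number) {
  const audioContext = new OfflineAudioContext(2, 48000 * (1 / 30), 48000);

  const analyser = audioContext.createAnalyser();
  analyser.fftSize = fftSize;
  analyser.minDecibels = -90;
  analyser.maxDecibels = -10;

  const gainNode = audioContext.createGain();
  gainNode.gain.value = 5;

  const filter = audioContext.createBiquadFilter();
  filter.type = "bandpass";
  filter.frequency.value = 15000;
  filter.Q.value = 0.05;

  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;

  source.connect(gainNode);
  gainNode.connect(filter);
  filter.connect(analyser);
  analyser.connect(audioContext.destination);

  return { audioContext, source, analyser };
}
```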
@@ -633,11 +644,17 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
    endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - this.trimEndMs
   };
  }
- static {
-   this.MIN_DB = -90;
+ set fftSize(value) {
+   this.setAttribute("fft-size", String(value));
  }
- static {
-   this.MAX_DB = -20;
+ set fftDecay(value) {
+   this.setAttribute("fft-decay", String(value));
+ }
+ get fftSize() {
+   return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+ }
+ get fftDecay() {
+   return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
  }
  static {
    this.DECAY_WEIGHT = 0.7;
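
Note: fftSize and fftDecay are no longer plain numeric fields initialised in the constructor (512 and 8); they are now accessors backed by the fft-size and fft-decay attributes, with fallbacks of 128 and 8 when the attribute is absent. The @property decorators for both fields are dropped in the next hunk. A small usage sketch (how the element instance is obtained is up to the caller):

```ts
// Sketch of the attribute-backed accessors; `media` stands for any element that
// extends EFMedia (EFAudio / EFVideo) and is only declared here for illustration.
declare const media: HTMLElement & { fftSize: number; fftDecay: number };

media.fftSize = 256;            // setter reflects to fft-size="256"
media.getAttribute("fft-size"); // "256"

media.removeAttribute("fft-size");
media.fftSize;                  // 128 – fallback when fft-size is unset
media.fftDecay;                 // 8   – fallback when fft-decay is unset
```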
@@ -672,12 +689,6 @@ __decorateClass([
 __decorateClass([
   state()
 ], _EFMedia.prototype, "desiredSeekTimeMs", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftSize", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftDecay", 2);
 let EFMedia = _EFMedia;
 export {
   EFMedia,
@@ -13,7 +13,7 @@ export declare class EFWaveform extends EFWaveform_base {
   private resizeObserver?;
   private mutationObserver?;
   render(): import('lit-html').TemplateResult<1>;
-  mode: "roundBars" | "bars" | "bricks" | "line" | "curve" | "pixel" | "wave" | "spikes";
+  mode: "roundBars" | "bars" | "bricks" | "line" | "curve" | "pixel" | "wave";
   color: string;
   target: string;
   targetElement: EFAudio | EFVideo | null;
@@ -26,12 +26,10 @@ export declare class EFWaveform extends EFWaveform_base {
   protected drawBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawBricks(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawRoundBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
-  protected drawEqualizer(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawLine(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawCurve(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawPixel(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   protected drawWave(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
-  protected drawSpikes(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
   frameTask: Task<readonly [EFAudio | EFVideo | null, Uint8Array | null | undefined], void>;
   get durationMs(): number;
   protected updated(changedProperties: PropertyValueMap<this>): void;
@@ -60,10 +60,10 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    }
    switch (this.mode) {
      case "bars":
-       this.drawBars(ctx, frequencyData);
+       this.drawBars(ctx, byteTimeData);
        break;
      case "bricks":
-       this.drawBricks(ctx, frequencyData);
+       this.drawBricks(ctx, byteTimeData);
        break;
      case "line":
        this.drawLine(ctx, byteTimeData);
@@ -72,16 +72,13 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
        this.drawCurve(ctx, byteTimeData);
        break;
      case "pixel":
-       this.drawPixel(ctx, frequencyData);
+       this.drawPixel(ctx, byteTimeData);
        break;
      case "wave":
-       this.drawWave(ctx, frequencyData);
-       break;
-     case "spikes":
-       this.drawSpikes(ctx, frequencyData);
+       this.drawWave(ctx, byteTimeData);
        break;
      case "roundBars":
-       this.drawRoundBars(ctx, frequencyData);
+       this.drawRoundBars(ctx, byteTimeData);
        break;
    }
    ctx.restore();
@@ -159,7 +156,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
+     const normalizedValue = Math.abs(value - 128) / 128;
      const barHeight = normalizedValue * waveHeight;
      const y = (waveHeight - barHeight) / 2;
      const x = waveWidth * paddingOuter + i * (barWidth * (1 + paddingInner));
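
Note: the bar-style renderers now receive byte time-domain data (where 128 represents silence) rather than frequency magnitudes, so their normalization switches from `Math.min(value / 255 * 2, 1)` to the absolute deviation from the midpoint. A tiny sketch of the new mapping:

```ts
// New amplitude mapping for the bar-style renderers: bar height is the absolute
// deviation of a byte time-domain sample from the 128 midpoint, scaled to 0..1.
const normalize = (value: number) => Math.abs(value - 128) / 128;

normalize(128); // 0     – silence
normalize(192); // 0.5   – half-scale excursion in either direction
normalize(0);   // 1     – full negative excursion
normalize(255); // ≈0.99 – full positive excursion
```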
@@ -178,7 +175,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    const verticalGap = boxSize * 0.2;
    const maxBricks = Math.floor(waveHeight / (boxSize + verticalGap));
    frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
+     const normalizedValue = Math.abs(value - 128) / 128;
      const brickCount = Math.floor(normalizedValue * maxBricks);
      for (let j = 0; j < brickCount; j++) {
        const x = columnWidth * i;
@@ -200,7 +197,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
+     const normalizedValue = Math.abs(value - 128) / 128;
      const height = normalizedValue * waveHeight;
      const x = waveWidth * paddingOuter + i * (barWidth * (1 + paddingInner));
      const y = (waveHeight - height) / 2;
@@ -208,34 +205,13 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    });
    ctx.fill(path);
  }
- drawEqualizer(ctx, frequencyData) {
-   const canvas = ctx.canvas;
-   const waveWidth = canvas.width;
-   const waveHeight = canvas.height;
-   const baseline = waveHeight / 2;
-   const barWidth = waveWidth / frequencyData.length * 0.8;
-   ctx.clearRect(0, 0, waveWidth, waveHeight);
-   const baselinePath = new Path2D();
-   const barsPath = new Path2D();
-   baselinePath.moveTo(0, baseline);
-   baselinePath.lineTo(waveWidth, baseline);
-   frequencyData.forEach((value, i) => {
-     const height = value / 255 * (waveHeight / 2);
-     const x = i * (waveWidth / frequencyData.length);
-     const y = baseline - height;
-     barsPath.rect(x, y, barWidth, Math.max(height * 2, 1));
-   });
-   ctx.lineWidth = 2;
-   ctx.stroke(baselinePath);
-   ctx.fill(barsPath);
- }
  drawLine(ctx, frequencyData) {
    const canvas = ctx.canvas;
    const waveWidth = canvas.width;
    const waveHeight = canvas.height;
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
-   const sampleRate = 4;
+   const sampleRate = 1;
    for (let i = 0; i < frequencyData.length; i += sampleRate) {
      const x = i / frequencyData.length * waveWidth;
      const y = (1 - (frequencyData[i] ?? 0) / 255) * waveHeight;
@@ -282,7 +258,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
+     const normalizedValue = Math.abs(value - 128) / 128;
      const x = i * (waveWidth / frequencyData.length);
      const barHeight = normalizedValue * (waveHeight / 2);
      const y = baseline - barHeight;
@@ -299,99 +275,40 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    const startX = waveWidth * paddingOuter;
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
-   const firstValue = Math.min((frequencyData[0] ?? 0) / 255 * 2, 1);
-   const firstY = (waveHeight - firstValue * waveHeight) / 2;
-   path.moveTo(startX, firstY);
-   frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
-     const x = startX + i / (frequencyData.length - 1) * availableWidth;
-     const barHeight = normalizedValue * waveHeight;
-     const y = (waveHeight - barHeight) / 2;
-     if (i === 0) {
-       path.moveTo(x, y);
-     } else {
-       const prevX = startX + (i - 1) / (frequencyData.length - 1) * availableWidth;
-       const prevValue = Math.min((frequencyData[i - 1] ?? 0) / 255 * 2, 1);
-       const prevBarHeight = prevValue * waveHeight;
-       const prevY = (waveHeight - prevBarHeight) / 2;
-       const xc = (prevX + x) / 2;
-       const yc = (prevY + y) / 2;
-       path.quadraticCurveTo(prevX, prevY, xc, yc);
-     }
-   });
-   for (let i = frequencyData.length - 1; i >= 0; i--) {
-     const normalizedValue = Math.min((frequencyData[i] ?? 0) / 255 * 2, 1);
-     const x = startX + i / (frequencyData.length - 1) * availableWidth;
-     const barHeight = normalizedValue * waveHeight;
-     const y = (waveHeight + barHeight) / 2;
-     if (i === frequencyData.length - 1) {
-       path.lineTo(x, y);
-     } else {
-       const nextX = startX + (i + 1) / (frequencyData.length - 1) * availableWidth;
-       const nextValue = Math.min((frequencyData[i + 1] ?? 0) / 255 * 2, 1);
-       const nextBarHeight = nextValue * waveHeight;
-       const nextY = (waveHeight + nextBarHeight) / 2;
-       const xc = (nextX + x) / 2;
-       const yc = (nextY + y) / 2;
-       path.quadraticCurveTo(nextX, nextY, xc, yc);
-     }
-   }
-   const lastY = (waveHeight + firstValue * waveHeight) / 2;
-   const controlX = startX;
-   const controlY = (lastY + firstY) / 2;
-   path.quadraticCurveTo(controlX, controlY, startX, firstY);
-   ctx.fill(path);
- }
- drawSpikes(ctx, frequencyData) {
-   const canvas = ctx.canvas;
-   const waveWidth = canvas.width;
-   const waveHeight = canvas.height;
-   const paddingOuter = 0.01;
-   const availableWidth = waveWidth * (1 - 2 * paddingOuter);
-   const startX = waveWidth * paddingOuter;
-   ctx.clearRect(0, 0, waveWidth, waveHeight);
-   const path = new Path2D();
-   const firstValue = (frequencyData[0] ?? 0) / 255;
-   const firstY = (waveHeight - firstValue * waveHeight) / 2;
+   const firstValue = ((frequencyData[0] ?? 128) - 128) / 128;
+   const firstY = waveHeight / 2 + firstValue * waveHeight / 2;
    path.moveTo(startX, firstY);
    frequencyData.forEach((value, i) => {
-     const normalizedValue = Math.min(value / 255 * 2, 1);
+     const normalizedValue = (value - 128) / 128;
      const x = startX + i / (frequencyData.length - 1) * availableWidth;
-     const barHeight = normalizedValue * (waveHeight / 2);
-     const y = (waveHeight - barHeight * 2) / 2;
+     const y = waveHeight / 2 - normalizedValue * waveHeight / 2;
      if (i === 0) {
        path.moveTo(x, y);
      } else {
        const prevX = startX + (i - 1) / (frequencyData.length - 1) * availableWidth;
-       const prevValue = (frequencyData[i - 1] ?? 0) / 255;
-       const prevBarHeight = prevValue * (waveHeight / 2);
-       const prevY = (waveHeight - prevBarHeight * 2) / 2;
+       const prevValue = ((frequencyData[i - 1] ?? 128) - 128) / 128;
+       const prevY = waveHeight / 2 - prevValue * waveHeight / 2;
        const xc = (prevX + x) / 2;
        const yc = (prevY + y) / 2;
        path.quadraticCurveTo(prevX, prevY, xc, yc);
      }
    });
    for (let i = frequencyData.length - 1; i >= 0; i--) {
-     const normalizedValue = Math.min((frequencyData[i] ?? 0) / 255 * 2, 1);
+     const normalizedValue = ((frequencyData[i] ?? 128) - 128) / 128;
      const x = startX + i / (frequencyData.length - 1) * availableWidth;
-     const barHeight = normalizedValue * (waveHeight / 2);
-     const y = (waveHeight + barHeight * 2) / 2;
+     const y = waveHeight / 2 + normalizedValue * waveHeight / 2;
      if (i === frequencyData.length - 1) {
        path.lineTo(x, y);
      } else {
        const nextX = startX + (i + 1) / (frequencyData.length - 1) * availableWidth;
-       const nextValue = (frequencyData[i + 1] ?? 0) / 255;
-       const nextBarHeight = nextValue * (waveHeight / 2);
-       const nextY = (waveHeight + nextBarHeight * 2) / 2;
+       const nextValue = ((frequencyData[i + 1] ?? 128) - 128) / 128;
+       const nextY = waveHeight / 2 + nextValue * waveHeight / 2;
        const xc = (nextX + x) / 2;
        const yc = (nextY + y) / 2;
        path.quadraticCurveTo(nextX, nextY, xc, yc);
      }
    }
-   const lastY = (waveHeight + firstValue * waveHeight) / 2;
-   const controlX = startX;
-   const controlY = (lastY + firstY) / 2;
-   path.quadraticCurveTo(controlX, controlY, startX, firstY);
+   path.closePath();
    ctx.fill(path);
  }
  get durationMs() {
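
Note: the renderer rewritten in this hunk treats samples as signed deviations from 128: the forward pass plots the top edge at `waveHeight / 2 - normalized * waveHeight / 2`, the reverse pass mirrors it below the midline, and the hand-built closing quadratic curve is replaced by `path.closePath()`. A sketch of the y-mapping:

```ts
// Signed y-mapping used by the rewritten envelope renderer: a sample of 128 sits
// on the vertical midline; the top and bottom edges deflect symmetrically around it.
const topY = (value: number, waveHeight: number) =>
  waveHeight / 2 - ((value - 128) / 128) * (waveHeight / 2);
const bottomY = (value: number, waveHeight: number) =>
  waveHeight / 2 + ((value - 128) / 128) * (waveHeight / 2);

topY(128, 200);    // 100  – midline
topY(255, 200);    // ≈0.8 – near the top edge
bottomY(255, 200); // ≈199 – mirrored near the bottom edge
topY(0, 200);      // 200  – a full negative excursion flips the edges
```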
@@ -20,10 +20,14 @@ function TWMixin(Base) {
    }
    const constructorStylesheets = [];
    const constructorStyles = "styles" in this.constructor && this.constructor.styles || [];
-   for (const item of constructorStyles) {
-     if (item.styleSheet) {
-       constructorStylesheets.push(item.styleSheet);
+   if (Array.isArray(constructorStyles)) {
+     for (const item of constructorStyles) {
+       if (item.styleSheet) {
+         constructorStylesheets.push(item.styleSheet);
+       }
      }
+   } else if (constructorStyles.styleSheet) {
+     constructorStylesheets.push(constructorStyles.styleSheet);
    }
    if (renderRoot?.adoptedStyleSheets) {
      renderRoot.adoptedStyleSheets = [
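
Note: TWMixin now accepts Lit's `static styles` as either a single CSSResult or an array; the old unconditional for...of assumed an array and would fail on a lone CSSResult, which is not iterable. A sketch of the two shapes the new branch handles (both are valid in Lit):

```ts
import { css, LitElement } from "lit";

class SingleStyle extends LitElement {
  // A lone CSSResult – handled by the new `else if (constructorStyles.styleSheet)` branch.
  static styles = css`:host { display: block; }`;
}

class ArrayStyles extends LitElement {
  // An array of CSSResults – the shape the previous loop assumed.
  static styles = [css`:host { display: block; }`, css`p { margin: 0; }`];
}
```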
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@editframe/elements",
-  "version": "0.15.0-beta.12",
+  "version": "0.15.0-beta.14",
   "description": "",
   "exports": {
     ".": {
@@ -27,7 +27,7 @@
   "license": "UNLICENSED",
   "dependencies": {
     "@bramus/style-observer": "^1.3.0",
-    "@editframe/assets": "0.15.0-beta.12",
+    "@editframe/assets": "0.15.0-beta.14",
     "@lit/context": "^1.1.2",
     "@lit/task": "^1.0.1",
     "d3": "^7.9.0",
@@ -590,14 +590,22 @@ export class EFMedia extends EFTargetable(
     };
   }
 
-  @property({ type: Number })
-  fftSize = 512; // Default FFT size
+  set fftSize(value: number) {
+    this.setAttribute("fft-size", String(value));
+  }
 
-  @property({ type: Number })
-  fftDecay = 8; // Default number of frames to analyze
+  set fftDecay(value: number) {
+    this.setAttribute("fft-decay", String(value));
+  }
+
+  get fftSize() {
+    return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+  }
+
+  get fftDecay() {
+    return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+  }
 
-  private static readonly MIN_DB = -90;
-  private static readonly MAX_DB = -20;
   private static readonly DECAY_WEIGHT = 0.7;
 
   // Update FREQ_WEIGHTS to use the instance fftSize instead of a static value
@@ -641,16 +649,15 @@ export class EFMedia extends EFTargetable(
       const currentTimeMs = this.currentSourceTimeMs;
       const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
       const audioBuffer = this.audioBufferTask.value.buffer;
-      const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
 
-      const cachedSmoothedData = this.#byteTimeDomainCache.get(smoothedKey);
-      if (cachedSmoothedData) {
-        return cachedSmoothedData;
-      }
+      const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+      const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+      if (cachedData) return cachedData;
 
+      // Process multiple frames with decay, similar to the reference code
       const framesData = await Promise.all(
-        Array.from({ length: this.fftDecay }, async (_, i) => {
-          const frameOffset = i * (1000 / 30);
+        Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+          const frameOffset = frameIndex * (1000 / 30);
           const startTime = Math.max(
             0,
             (currentTimeMs - frameOffset - startOffsetMs) / 1000,
@@ -658,83 +665,91 @@ export class EFMedia extends EFTargetable(
           );
 
           const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
           const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
-          if (cachedFrame) {
-            return cachedFrame;
-          }
+          if (cachedFrame) return cachedFrame;
 
           const audioContext = new OfflineAudioContext(
             2,
             48000 * (1 / 30),
             48000,
           );
-          const analyser = audioContext.createAnalyser();
-          analyser.fftSize = this.fftSize;
 
-          // Increase gain even more for better signal
-          const gainNode = audioContext.createGain();
-          gainNode.gain.value = 10.0; // Try a higher gain
+          const source = audioContext.createBufferSource();
+          source.buffer = audioBuffer;
 
-          // More aggressive settings for the analyzer
-          analyser.smoothingTimeConstant = 0.4;
+          // Create analyzer for PCM data
+          const analyser = audioContext.createAnalyser();
+          analyser.fftSize = this.fftSize; // Ensure enough samples
           analyser.minDecibels = -90;
-          analyser.maxDecibels = -10;
-
-          const audioBufferSource = audioContext.createBufferSource();
-          audioBufferSource.buffer = audioBuffer;
+          analyser.maxDecibels = -20;
 
-          // Add a bandpass filter to focus on the most active frequency ranges
-          const filter = audioContext.createBiquadFilter();
-          filter.type = "bandpass";
-          filter.frequency.value = 1000; // Center frequency in Hz
-          filter.Q.value = 0.5; // Width of the band
+          const gainNode = audioContext.createGain();
+          gainNode.gain.value = 10.0; // Amplify the signal
 
-          audioBufferSource.connect(gainNode);
-          gainNode.connect(filter);
-          filter.connect(analyser);
+          source.connect(gainNode);
+          gainNode.connect(analyser);
           analyser.connect(audioContext.destination);
 
-          audioBufferSource.start(0, startTime, 1 / 30);
+          source.start(0, startTime, 1 / 30);
 
+          const dataLength = analyser.fftSize / 2;
           try {
             await audioContext.startRendering();
-            // Change to time domain data
-            const frameData = new Uint8Array(analyser.fftSize);
+            const frameData = new Uint8Array(dataLength);
             analyser.getByteTimeDomainData(frameData);
 
-            this.#byteTimeDomainCache.set(cacheKey, frameData);
-            return frameData;
+            // const points = frameData;
+            // Calculate RMS and midpoint values
+            const points = new Uint8Array(dataLength);
+            for (let i = 0; i < dataLength; i++) {
+              const pointSamples = frameData.slice(
+                i * (frameData.length / dataLength),
+                (i + 1) * (frameData.length / dataLength),
+              );
+
+              // Calculate RMS while preserving sign
+              const rms = Math.sqrt(
+                pointSamples.reduce((sum, sample) => {
+                  const normalized = (sample - 128) / 128;
+                  return sum + normalized * normalized;
+                }, 0) / pointSamples.length,
+              );
+
+              // Get average sign of the samples to determine direction
+              const avgSign = Math.sign(
+                pointSamples.reduce((sum, sample) => sum + (sample - 128), 0),
+              );
+
+              // Convert RMS back to byte range, preserving direction
+              points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+            }
+
+            this.#byteTimeDomainCache.set(cacheKey, points);
+            return points;
           } finally {
-            audioBufferSource.disconnect();
+            source.disconnect();
            analyser.disconnect();
          }
        }),
      );
 
+      // Combine frames with decay weighting
       const frameLength = framesData[0]?.length ?? 0;
       const smoothedData = new Uint8Array(frameLength);
 
-      // Combine frames with decay
       for (let i = 0; i < frameLength; i++) {
        let weightedSum = 0;
        let weightSum = 0;
 
        framesData.forEach((frame, frameIndex) => {
          const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
-          // biome-ignore lint/style/noNonNullAssertion: Will exist due to forEach
-          weightedSum += frame[i]! * decayWeight;
+          weightedSum += (frame[i] ?? 0) * decayWeight;
          weightSum += decayWeight;
        });
 
        smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
      }
 
-      // Remove frequency weighting since we're using time domain data
-      // No need to slice the data either since we want the full waveform
-
-      this.#byteTimeDomainCache.set(
-        smoothedKey,
-        smoothedData.slice(0, Math.floor(smoothedData.length * 0.8)),
-      );
+      this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
       return smoothedData;
     },
   });
@@ -789,13 +804,24 @@ export class EFMedia extends EFTargetable(
       );
       const analyser = audioContext.createAnalyser();
       analyser.fftSize = this.fftSize;
-      analyser.minDecibels = EFMedia.MIN_DB;
-      analyser.maxDecibels = EFMedia.MAX_DB;
+      analyser.minDecibels = -90;
+      analyser.maxDecibels = -10;
+      // analyser.smoothingTimeConstant = 0.4;
+
+      const gainNode = audioContext.createGain();
+      gainNode.gain.value = 5.0;
+
+      const filter = audioContext.createBiquadFilter();
+      filter.type = "bandpass";
+      filter.frequency.value = 15000;
+      filter.Q.value = 0.05;
 
       const audioBufferSource = audioContext.createBufferSource();
       audioBufferSource.buffer = audioBuffer;
 
-      audioBufferSource.connect(analyser);
+      audioBufferSource.connect(gainNode);
+      gainNode.connect(filter);
+      filter.connect(analyser);
       analyser.connect(audioContext.destination);
 
       audioBufferSource.start(0, startTime, 1 / 30);