@editframe/elements 0.15.0-beta.13 → 0.15.0-beta.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/elements/EFMedia.d.ts +5 -4
- package/dist/elements/EFMedia.js +111 -49
- package/dist/elements/EFWaveform.d.ts +0 -1
- package/dist/elements/EFWaveform.js +5 -26
- package/package.json +2 -2
- package/src/elements/EFMedia.ts +156 -58
- package/src/elements/EFWaveform.ts +5 -42
- package/types.json +1 -1
package/dist/elements/EFMedia.d.ts
CHANGED
@@ -64,10 +64,11 @@ export declare class EFMedia extends EFMedia_base {
        startMs: number;
        endMs: number;
    } | undefined>;
-    fftSize: number;
-    fftDecay: number;
-
-
+    set fftSize(value: number);
+    set fftDecay(value: number);
+    get fftSize(): number;
+    get fftDecay(): number;
+    get shouldInterpolateFrequencies(): boolean;
    private static readonly DECAY_WEIGHT;
    get FREQ_WEIGHTS(): Float32Array;
    byteTimeDomainTask: Task<readonly [import('@lit/task').TaskStatus, number, number, number], Uint8Array | null>;
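Note: fftSize and fftDecay are no longer plain declared fields; they become accessor pairs backed by the fft-size and fft-decay attributes (with defaults of 128 and 8 in the implementation below). The following is an illustrative sketch of that reflection pattern only; the class and tag name here are placeholders, not part of @editframe/elements.

// Sketch of the attribute-backed accessor pattern used by the new setters/getters.
// "fft-visualizer" and FFTVisualizer are hypothetical names for illustration.
class FFTVisualizer extends HTMLElement {
  // Writing the property reflects it into an attribute.
  set fftSize(value: number) {
    this.setAttribute("fft-size", String(value));
  }
  // Reading the property parses the attribute, falling back to a default of 128.
  get fftSize(): number {
    return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
  }
}
customElements.define("fft-visualizer", FFTVisualizer);

const el = document.createElement("fft-visualizer") as FFTVisualizer;
el.fftSize = 512;                         // writes fft-size="512"
console.log(el.getAttribute("fft-size")); // "512"
el.removeAttribute("fft-size");
console.log(el.fftSize);                  // 128 (default)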
package/dist/elements/EFMedia.js
CHANGED
@@ -247,8 +247,6 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        };
      }
    });
-    this.fftSize = 512;
-    this.fftDecay = 8;
    this.#byteTimeDomainCache = new LRUCache(100);
    this.byteTimeDomainTask = new Task(this, {
      autoRun: EF_INTERACTIVE,
@@ -266,53 +264,61 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
        const audioBuffer = this.audioBufferTask.value.buffer;
        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
-        const
-        if (
-          return cachedSmoothedData;
-        }
+        const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+        if (cachedData) return cachedData;
        const framesData = await Promise.all(
-          Array.from({ length: this.fftDecay }, async (_,
-            const frameOffset =
+          Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+            const frameOffset = frameIndex * (1e3 / 30);
            const startTime = Math.max(
              0,
              (currentTimeMs - frameOffset - startOffsetMs) / 1e3
            );
            const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
            const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
-            if (cachedFrame)
-              return cachedFrame;
-            }
+            if (cachedFrame) return cachedFrame;
            const audioContext = new OfflineAudioContext(
              2,
              48e3 * (1 / 30),
              48e3
            );
+            const source = audioContext.createBufferSource();
+            source.buffer = audioBuffer;
            const analyser = audioContext.createAnalyser();
            analyser.fftSize = this.fftSize;
-            const gainNode = audioContext.createGain();
-            gainNode.gain.value = 10;
-            analyser.smoothingTimeConstant = 0.4;
            analyser.minDecibels = -90;
-            analyser.maxDecibels = -
-            const
-
-
-
-            filter.frequency.value = 1e3;
-            filter.Q.value = 0.5;
-            audioBufferSource.connect(gainNode);
-            gainNode.connect(filter);
-            filter.connect(analyser);
+            analyser.maxDecibels = -20;
+            const gainNode = audioContext.createGain();
+            gainNode.gain.value = 2;
+            source.connect(gainNode);
+            gainNode.connect(analyser);
            analyser.connect(audioContext.destination);
-
+            source.start(0, startTime, 1 / 30);
+            const dataLength = analyser.fftSize / 2;
            try {
              await audioContext.startRendering();
-              const frameData = new Uint8Array(
+              const frameData = new Uint8Array(dataLength);
              analyser.getByteTimeDomainData(frameData);
-
-
+              const points = new Uint8Array(dataLength);
+              for (let i = 0; i < dataLength; i++) {
+                const pointSamples = frameData.slice(
+                  i * (frameData.length / dataLength),
+                  (i + 1) * (frameData.length / dataLength)
+                );
+                const rms = Math.sqrt(
+                  pointSamples.reduce((sum, sample) => {
+                    const normalized = (sample - 128) / 128;
+                    return sum + normalized * normalized;
+                  }, 0) / pointSamples.length
+                );
+                const avgSign = Math.sign(
+                  pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
+                );
+                points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+              }
+              this.#byteTimeDomainCache.set(cacheKey, points);
+              return points;
            } finally {
-
+              source.disconnect();
              analyser.disconnect();
            }
          })
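For reference, the new per-frame reduction above collapses each slice of 8-bit time-domain samples (centered at 128) into a single byte whose magnitude is the slice's RMS and whose direction follows the slice's average sign. A standalone sketch of that math, with a hypothetical samples array (not taken from the package):

// Sketch of the RMS-with-sign reduction shown in the diff.
// `samples` is hypothetical 8-bit PCM data where 128 means silence.
function reducePoint(samples: Uint8Array): number {
  const rms = Math.sqrt(
    samples.reduce((sum, sample) => {
      const normalized = (sample - 128) / 128; // map byte to [-1, 1]
      return sum + normalized * normalized;
    }, 0) / samples.length,
  );
  const avgSign = Math.sign(
    samples.reduce((sum, sample) => sum + (sample - 128), 0),
  );
  // Map back to the 0-255 byte range, keeping the direction of the swing.
  return Math.min(255, Math.round(128 + avgSign * rms * 128));
}

console.log(reducePoint(new Uint8Array([128, 128, 128, 128]))); // 128 (silence)
console.log(reducePoint(new Uint8Array([192, 192, 192, 192]))); // 192 (positive swing)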
@@ -324,15 +330,12 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
          let weightSum = 0;
          framesData.forEach((frame, frameIndex) => {
            const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
-            weightedSum += frame[i] * decayWeight;
+            weightedSum += (frame[i] ?? 0) * decayWeight;
            weightSum += decayWeight;
          });
          smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
        }
-        this.#byteTimeDomainCache.set(
-          smoothedKey,
-          smoothedData.slice(0, Math.floor(smoothedData.length * 0.8))
-        );
+        this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
        return smoothedData;
      }
    });
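The cache now stores the full smoothed array instead of only the first 80% of it. Frames are still blended with an exponential decay (DECAY_WEIGHT = 0.7 elsewhere in this file), so each older frame contributes 0.7 times the weight of the one before it. A small worked sketch of that weighting with hypothetical per-frame values:

// Decay-weighted average of one bin across frames, as in the diff.
// frames[0] is the current frame; older frames get 0.7 ** frameIndex weight.
const DECAY_WEIGHT = 0.7;
const frames = [200, 100, 50]; // hypothetical values of bin i across 3 frames

let weightedSum = 0;
let weightSum = 0;
frames.forEach((value, frameIndex) => {
  const decayWeight = DECAY_WEIGHT ** frameIndex;
  weightedSum += value * decayWeight;
  weightSum += decayWeight;
});

// (200*1 + 100*0.7 + 50*0.49) / (1 + 0.7 + 0.49) ≈ 134
console.log(Math.min(255, Math.round(weightedSum / weightSum)));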
@@ -378,11 +381,19 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
        );
        const analyser = audioContext.createAnalyser();
        analyser.fftSize = this.fftSize;
-        analyser.minDecibels =
-        analyser.maxDecibels =
+        analyser.minDecibels = -90;
+        analyser.maxDecibels = -10;
+        const gainNode = audioContext.createGain();
+        gainNode.gain.value = 3;
+        const filter = audioContext.createBiquadFilter();
+        filter.type = "bandpass";
+        filter.frequency.value = 15e3;
+        filter.Q.value = 0.05;
        const audioBufferSource = audioContext.createBufferSource();
        audioBufferSource.buffer = audioBuffer;
-        audioBufferSource.connect(
+        audioBufferSource.connect(filter);
+        filter.connect(gainNode);
+        gainNode.connect(analyser);
        analyser.connect(audioContext.destination);
        audioBufferSource.start(0, startTime, 1 / 30);
        try {
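The frequency path now routes the buffer source through a very wide bandpass (centered at 15 kHz with Q = 0.05) and a 3x gain before the analyser. A minimal sketch of that wiring on an OfflineAudioContext, assuming an already-decoded AudioBuffer is available (how the buffer is produced is not shown in this hunk):

// Sketch of the new analysis graph: source -> bandpass -> gain -> analyser.
async function analyseFrame(audioBuffer: AudioBuffer): Promise<Uint8Array> {
  const ctx = new OfflineAudioContext(2, 48000 * (1 / 30), 48000); // one 1/30 s frame
  const analyser = ctx.createAnalyser();
  analyser.minDecibels = -90;
  analyser.maxDecibels = -10;
  const gain = ctx.createGain();
  gain.gain.value = 3;
  const filter = ctx.createBiquadFilter();
  filter.type = "bandpass";
  filter.frequency.value = 15000;
  filter.Q.value = 0.05; // very low Q, so the pass band is broad
  const source = ctx.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(filter);
  filter.connect(gain);
  gain.connect(analyser);
  analyser.connect(ctx.destination);
  source.start(0);
  await ctx.startRendering();
  const bins = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(bins);
  return bins;
}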
@@ -417,8 +428,9 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
          0,
          Math.floor(smoothedData.length / 2)
        );
-        this
-
+        const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
+        this.#frequencyDataCache.set(smoothedKey, processedData);
+        return processedData;
      }
    });
  }
@@ -633,11 +645,27 @@ const _EFMedia = class _EFMedia2 extends EFTargetable(
      endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - this.trimEndMs
    };
  }
-
-
+  set fftSize(value) {
+    const oldValue = this.fftSize;
+    this.setAttribute("fft-size", String(value));
+    this.requestUpdate("fft-size", oldValue);
  }
-
-
+  set fftDecay(value) {
+    const oldValue = this.fftDecay;
+    this.setAttribute("fft-decay", String(value));
+    this.requestUpdate("fft-decay", oldValue);
+  }
+  get fftSize() {
+    return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+  }
+  get fftDecay() {
+    return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+  }
+  get shouldInterpolateFrequencies() {
+    if (this.hasAttribute("interpolate-frequencies")) {
+      return this.getAttribute("interpolate-frequencies") !== "false";
+    }
+    return false;
  }
  static {
    this.DECAY_WEIGHT = 0.7;
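Per the getter above, frequency interpolation is opt-in: it is enabled by the presence of the interpolate-frequencies attribute and disabled only by the literal string "false". A short sketch of those semantics, assuming an element instance `media` with this getter (the variable itself is hypothetical):

// Attribute semantics of the new shouldInterpolateFrequencies getter.
declare const media: HTMLElement & { readonly shouldInterpolateFrequencies: boolean };

media.removeAttribute("interpolate-frequencies");
console.log(media.shouldInterpolateFrequencies); // false (attribute absent)

media.setAttribute("interpolate-frequencies", "");
console.log(media.shouldInterpolateFrequencies); // true (present, not "false")

media.setAttribute("interpolate-frequencies", "false");
console.log(media.shouldInterpolateFrequencies); // false (explicit opt-out)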
@@ -672,13 +700,47 @@ __decorateClass([
__decorateClass([
  state()
], _EFMedia.prototype, "desiredSeekTimeMs", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftSize", 2);
-__decorateClass([
-  property({ type: Number })
-], _EFMedia.prototype, "fftDecay", 2);
let EFMedia = _EFMedia;
+function processFFTData(fftData, zeroThresholdPercent = 0.1) {
+  const totalBins = fftData.length;
+  const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+  let zeroCount = 0;
+  let cutoffIndex = totalBins;
+  for (let i = totalBins - 1; i >= 0; i--) {
+    if (fftData[i] < 10) {
+      zeroCount++;
+    } else {
+      if (zeroCount >= zeroThresholdCount) {
+        cutoffIndex = i + 1;
+        break;
+      }
+    }
+  }
+  if (cutoffIndex < zeroThresholdCount) {
+    return fftData;
+  }
+  const goodData = fftData.slice(0, cutoffIndex);
+  const resampledData = interpolateData(goodData, fftData.length);
+  return resampledData;
+}
+function interpolateData(data, targetSize) {
+  const resampled = new Uint8Array(targetSize);
+  const dataLength = data.length;
+  for (let i = 0; i < targetSize; i++) {
+    const ratio = i / (targetSize - 1) * (dataLength - 1);
+    const index = Math.floor(ratio);
+    const fraction = ratio - index;
+    if (index >= dataLength - 1) {
+      resampled[i] = data[dataLength - 1];
+    } else {
+      resampled[i] = Math.round(
+        // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+        data[index] * (1 - fraction) + data[index + 1] * fraction
+      );
+    }
+  }
+  return resampled;
+}
export {
  EFMedia,
  deepGetMediaElements
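The two helpers added above first trim the run of near-zero bins at the top of the spectrum, then stretch the remaining bins back out to the original length with linear interpolation. A small worked example of the resampling step (the input values are hypothetical, and the helper is module-private in the package, so this is illustrative only):

// interpolateData stretches a short array to targetSize by linear interpolation.
// With data = [10, 20, 30] and targetSize = 5, samples fall at source positions
// 0, 0.5, 1, 1.5 and 2:
const resampled = interpolateData(new Uint8Array([10, 20, 30]), 5);
console.log(Array.from(resampled)); // [10, 15, 20, 25, 30]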
package/dist/elements/EFWaveform.d.ts
CHANGED
@@ -26,7 +26,6 @@ export declare class EFWaveform extends EFWaveform_base {
    protected drawBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
    protected drawBricks(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
    protected drawRoundBars(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
-    protected drawEqualizer(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
    protected drawLine(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
    protected drawCurve(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
    protected drawPixel(ctx: CanvasRenderingContext2D, frequencyData: Uint8Array): void;
package/dist/elements/EFWaveform.js
CHANGED
@@ -159,7 +159,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-      const normalizedValue =
+      const normalizedValue = value / 255;
      const barHeight = normalizedValue * waveHeight;
      const y = (waveHeight - barHeight) / 2;
      const x = waveWidth * paddingOuter + i * (barWidth * (1 + paddingInner));
@@ -178,7 +178,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
      const verticalGap = boxSize * 0.2;
      const maxBricks = Math.floor(waveHeight / (boxSize + verticalGap));
      frequencyData.forEach((value, i) => {
-        const normalizedValue =
+        const normalizedValue = value / 255;
        const brickCount = Math.floor(normalizedValue * maxBricks);
        for (let j = 0; j < brickCount; j++) {
          const x = columnWidth * i;
@@ -200,7 +200,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-      const normalizedValue =
+      const normalizedValue = value / 255;
      const height = normalizedValue * waveHeight;
      const x = waveWidth * paddingOuter + i * (barWidth * (1 + paddingInner));
      const y = (waveHeight - height) / 2;
@@ -208,34 +208,13 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    });
    ctx.fill(path);
  }
-  drawEqualizer(ctx, frequencyData) {
-    const canvas = ctx.canvas;
-    const waveWidth = canvas.width;
-    const waveHeight = canvas.height;
-    const baseline = waveHeight / 2;
-    const barWidth = waveWidth / frequencyData.length * 0.8;
-    ctx.clearRect(0, 0, waveWidth, waveHeight);
-    const baselinePath = new Path2D();
-    const barsPath = new Path2D();
-    baselinePath.moveTo(0, baseline);
-    baselinePath.lineTo(waveWidth, baseline);
-    frequencyData.forEach((value, i) => {
-      const height = value / 255 * (waveHeight / 2);
-      const x = i * (waveWidth / frequencyData.length);
-      const y = baseline - height;
-      barsPath.rect(x, y, barWidth, Math.max(height * 2, 1));
-    });
-    ctx.lineWidth = 2;
-    ctx.stroke(baselinePath);
-    ctx.fill(barsPath);
-  }
  drawLine(ctx, frequencyData) {
    const canvas = ctx.canvas;
    const waveWidth = canvas.width;
    const waveHeight = canvas.height;
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
-    const sampleRate =
+    const sampleRate = 1;
    for (let i = 0; i < frequencyData.length; i += sampleRate) {
      const x = i / frequencyData.length * waveWidth;
      const y = (1 - (frequencyData[i] ?? 0) / 255) * waveHeight;
@@ -282,7 +261,7 @@ let EFWaveform = class extends EFTemporal(TWMixin(LitElement)) {
    ctx.clearRect(0, 0, waveWidth, waveHeight);
    const path = new Path2D();
    frequencyData.forEach((value, i) => {
-      const normalizedValue =
+      const normalizedValue = value / 255;
      const x = i * (waveWidth / frequencyData.length);
      const barHeight = normalizedValue * (waveHeight / 2);
      const y = baseline - barHeight;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@editframe/elements",
-  "version": "0.15.0-beta.13",
+  "version": "0.15.0-beta.15",
   "description": "",
   "exports": {
     ".": {
@@ -27,7 +27,7 @@
   "license": "UNLICENSED",
   "dependencies": {
     "@bramus/style-observer": "^1.3.0",
-    "@editframe/assets": "0.15.0-beta.
+    "@editframe/assets": "0.15.0-beta.15",
     "@lit/context": "^1.1.2",
     "@lit/task": "^1.0.1",
     "d3": "^7.9.0",
package/src/elements/EFMedia.ts
CHANGED
@@ -590,14 +590,33 @@ export class EFMedia extends EFTargetable(
    };
  }

-
-
+  set fftSize(value: number) {
+    const oldValue = this.fftSize;
+    this.setAttribute("fft-size", String(value));
+    this.requestUpdate("fft-size", oldValue);
+  }

-
-
+  set fftDecay(value: number) {
+    const oldValue = this.fftDecay;
+    this.setAttribute("fft-decay", String(value));
+    this.requestUpdate("fft-decay", oldValue);
+  }
+
+  get fftSize() {
+    return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+  }
+
+  get fftDecay() {
+    return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+  }
+
+  get shouldInterpolateFrequencies() {
+    if (this.hasAttribute("interpolate-frequencies")) {
+      return this.getAttribute("interpolate-frequencies") !== "false";
+    }
+    return false;
+  }

-  private static readonly MIN_DB = -90;
-  private static readonly MAX_DB = -20;
  private static readonly DECAY_WEIGHT = 0.7;

  // Update FREQ_WEIGHTS to use the instance fftSize instead of a static value
@@ -641,16 +660,15 @@ export class EFMedia extends EFTargetable(
        const currentTimeMs = this.currentSourceTimeMs;
        const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
        const audioBuffer = this.audioBufferTask.value.buffer;
-        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;

-        const
-
-
-        }
+        const smoothedKey = `${this.fftSize}:${this.fftDecay}:${startOffsetMs}:${currentTimeMs}`;
+        const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+        if (cachedData) return cachedData;

+        // Process multiple frames with decay, similar to the reference code
        const framesData = await Promise.all(
-          Array.from({ length: this.fftDecay }, async (_,
-            const frameOffset =
+          Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+            const frameOffset = frameIndex * (1000 / 30);
            const startTime = Math.max(
              0,
              (currentTimeMs - frameOffset - startOffsetMs) / 1000,
@@ -658,83 +676,91 @@ export class EFMedia extends EFTargetable(

            const cacheKey = `${this.fftSize}:${startOffsetMs}:${startTime}`;
            const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
-            if (cachedFrame)
-              return cachedFrame;
-            }
+            if (cachedFrame) return cachedFrame;

            const audioContext = new OfflineAudioContext(
              2,
              48000 * (1 / 30),
              48000,
            );
-            const analyser = audioContext.createAnalyser();
-            analyser.fftSize = this.fftSize;

-
-
-            gainNode.gain.value = 10.0; // Try a higher gain
+            const source = audioContext.createBufferSource();
+            source.buffer = audioBuffer;

-            //
-            analyser
+            // Create analyzer for PCM data
+            const analyser = audioContext.createAnalyser();
+            analyser.fftSize = this.fftSize; // Ensure enough samples
            analyser.minDecibels = -90;
-            analyser.maxDecibels = -
+            analyser.maxDecibels = -20;

-            const
-
-
-            // Add a bandpass filter to focus on the most active frequency ranges
-            const filter = audioContext.createBiquadFilter();
-            filter.type = "bandpass";
-            filter.frequency.value = 1000; // Center frequency in Hz
-            filter.Q.value = 0.5; // Width of the band
+            const gainNode = audioContext.createGain();
+            gainNode.gain.value = 2.0; // Amplify the signal

-
-            gainNode.connect(
-            filter.connect(analyser);
+            source.connect(gainNode);
+            gainNode.connect(analyser);
            analyser.connect(audioContext.destination);

-
+            source.start(0, startTime, 1 / 30);

+            const dataLength = analyser.fftSize / 2;
            try {
              await audioContext.startRendering();
-
-              const frameData = new Uint8Array(analyser.fftSize);
+              const frameData = new Uint8Array(dataLength);
              analyser.getByteTimeDomainData(frameData);

-
-
+              // const points = frameData;
+              // Calculate RMS and midpoint values
+              const points = new Uint8Array(dataLength);
+              for (let i = 0; i < dataLength; i++) {
+                const pointSamples = frameData.slice(
+                  i * (frameData.length / dataLength),
+                  (i + 1) * (frameData.length / dataLength),
+                );
+
+                // Calculate RMS while preserving sign
+                const rms = Math.sqrt(
+                  pointSamples.reduce((sum, sample) => {
+                    const normalized = (sample - 128) / 128;
+                    return sum + normalized * normalized;
+                  }, 0) / pointSamples.length,
+                );
+
+                // Get average sign of the samples to determine direction
+                const avgSign = Math.sign(
+                  pointSamples.reduce((sum, sample) => sum + (sample - 128), 0),
+                );
+
+                // Convert RMS back to byte range, preserving direction
+                points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+              }
+
+              this.#byteTimeDomainCache.set(cacheKey, points);
+              return points;
            } finally {
-
+              source.disconnect();
              analyser.disconnect();
            }
          }),
        );

+        // Combine frames with decay weighting
        const frameLength = framesData[0]?.length ?? 0;
        const smoothedData = new Uint8Array(frameLength);

-        // Combine frames with decay
        for (let i = 0; i < frameLength; i++) {
          let weightedSum = 0;
          let weightSum = 0;

          framesData.forEach((frame, frameIndex) => {
            const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
-
-            weightedSum += frame[i]! * decayWeight;
+            weightedSum += (frame[i] ?? 0) * decayWeight;
            weightSum += decayWeight;
          });

          smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
        }

-
-        // No need to slice the data either since we want the full waveform
-
-        this.#byteTimeDomainCache.set(
-          smoothedKey,
-          smoothedData.slice(0, Math.floor(smoothedData.length * 0.8)),
-        );
+        this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
        return smoothedData;
      },
    });
@@ -789,13 +815,23 @@ export class EFMedia extends EFTargetable(
        );
        const analyser = audioContext.createAnalyser();
        analyser.fftSize = this.fftSize;
-        analyser.minDecibels =
-        analyser.maxDecibels =
+        analyser.minDecibels = -90;
+        analyser.maxDecibels = -10;
+
+        const gainNode = audioContext.createGain();
+        gainNode.gain.value = 3.0;
+
+        const filter = audioContext.createBiquadFilter();
+        filter.type = "bandpass";
+        filter.frequency.value = 15000;
+        filter.Q.value = 0.05;

        const audioBufferSource = audioContext.createBufferSource();
        audioBufferSource.buffer = audioBuffer;

-        audioBufferSource.connect(
+        audioBufferSource.connect(filter);
+        filter.connect(gainNode);
+        gainNode.connect(analyser);
        analyser.connect(audioContext.destination);

        audioBufferSource.start(0, startTime, 1 / 30);
@@ -825,7 +861,7 @@ export class EFMedia extends EFTargetable(

          framesData.forEach((frame, frameIndex) => {
            const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
-            // biome-ignore lint/style/noNonNullAssertion:
+            // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
            weightedSum += frame[i]! * decayWeight;
            weightSum += decayWeight;
          });
@@ -835,7 +871,7 @@ export class EFMedia extends EFTargetable(

        // Apply frequency weights using instance FREQ_WEIGHTS
        smoothedData.forEach((value, i) => {
-          // biome-ignore lint/style/noNonNullAssertion:
+          // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
          const freqWeight = this.FREQ_WEIGHTS[i]!;
          smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
        });
@@ -846,8 +882,70 @@ export class EFMedia extends EFTargetable(
          0,
          Math.floor(smoothedData.length / 2),
        );
-        this
-
+        const processedData = this.shouldInterpolateFrequencies
+          ? processFFTData(slicedData)
+          : slicedData;
+        this.#frequencyDataCache.set(smoothedKey, processedData);
+        return processedData;
      },
    });
  }
+
+function processFFTData(fftData: Uint8Array, zeroThresholdPercent = 0.1) {
+  // Step 1: Determine the threshold for zeros
+  const totalBins = fftData.length;
+  const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+
+  // Step 2: Interrogate the FFT output to find the cutoff point
+  let zeroCount = 0;
+  let cutoffIndex = totalBins; // Default to the end of the array
+
+  for (let i = totalBins - 1; i >= 0; i--) {
+    // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+    if (fftData[i]! < 10) {
+      zeroCount++;
+    } else {
+      // If we encounter a non-zero value, we can stop
+      if (zeroCount >= zeroThresholdCount) {
+        cutoffIndex = i + 1; // Include this index
+        break;
+      }
+    }
+  }
+
+  if (cutoffIndex < zeroThresholdCount) {
+    return fftData;
+  }
+
+  // Step 3: Resample the "good" portion of the data
+  const goodData = fftData.slice(0, cutoffIndex);
+  const resampledData = interpolateData(goodData, fftData.length);
+
+  return resampledData;
+}
+
+function interpolateData(data: Uint8Array, targetSize: number) {
+  const resampled = new Uint8Array(targetSize);
+  const dataLength = data.length;
+
+  for (let i = 0; i < targetSize; i++) {
+    // Calculate the corresponding index in the original data
+    const ratio = (i / (targetSize - 1)) * (dataLength - 1);
+    const index = Math.floor(ratio);
+    const fraction = ratio - index;
+
+    // Handle edge cases
+    if (index >= dataLength - 1) {
+      // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+      resampled[i] = data[dataLength - 1]!; // Last value
+    } else {
+      // Linear interpolation
+      resampled[i] = Math.round(
+        // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
+        data[index]! * (1 - fraction) + data[index + 1]! * fraction,
+      );
+    }
+  }
+
+  return resampled;
+}