spectral-display 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +174 -0
- package/dist/index.d.ts +129 -0
- package/dist/index.js +1751 -0
- package/package.json +45 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1751 @@
|
|
|
1
|
+
import { useRef, useEffect, useState, useCallback } from 'react';
|
|
2
|
+
import { jsx } from 'react/jsx-runtime';
|
|
3
|
+
|
|
4
|
+
// src/LoudnessCanvas.tsx
|
|
5
|
+
// Default CSS colors for each loudness overlay drawn by LoudnessCanvas.
// Callers may override any subset via the component's `colors` prop.
var DEFAULT_COLORS = {
  rms: "rgba(0, 200, 255, 0.6)",
  momentary: "rgba(255, 200, 0, 0.8)",
  shortTerm: "rgba(0, 255, 100, 0.8)",
  integrated: "rgba(255, 255, 255, 0.9)",
  truePeak: "rgba(255, 80, 80, 0.9)"
};
|
|
12
|
+
/**
 * Maps a LUFS value to a canvas y-coordinate.
 *
 * The LUFS value is converted to linear amplitude (10^(lufs/20)) and drawn
 * upward from the vertical center: 0 LUFS lands at y = 0, silence at center.
 * Non-finite values and anything below the -60 LUFS floor collapse to the
 * center line.
 */
function lufsToY(lufs, canvasHeight) {
  const mid = canvasHeight / 2;
  if (!isFinite(lufs) || lufs < -60) {
    return mid;
  }
  const amplitude = 10 ** (lufs / 20);
  return mid - amplitude * mid;
}
|
|
17
|
+
/**
 * Fills a symmetric RMS envelope around the vertical center of the canvas.
 *
 * For every pixel column the maximum RMS value of the envelope points that
 * map onto that column is taken, traced along the top edge left-to-right,
 * then mirrored along the bottom edge right-to-left, and the closed shape
 * is filled with `color`.
 *
 * @param {{rmsEnvelope: ArrayLike<number>, pointCount: number}} loudness
 * @param {string} color - canvas fill style
 * @param {{width: number, height: number}} dimensions - canvas pixel size
 * @param {CanvasRenderingContext2D} context
 */
function drawRmsEnvelope(loudness, color, dimensions, context) {
  const { rmsEnvelope, pointCount } = loudness;
  const { width, height } = dimensions;
  if (pointCount === 0) return;
  const centerY = height / 2;
  const halfHeight = height / 2;
  const stride = (pointCount - 1) / width;
  // Max RMS over the envelope points that fall into pixel column `px`.
  // (Previously duplicated verbatim in both the top and bottom loops.)
  const peakInColumn = (px) => {
    const ptStart = Math.floor(px * stride);
    const ptEnd = Math.min(Math.floor((px + 1) * stride), pointCount - 1);
    let maxRms = 0;
    for (let pt = ptStart; pt <= ptEnd; pt++) {
      const rms = rmsEnvelope[pt];
      if (rms > maxRms) maxRms = rms;
    }
    return maxRms;
  };
  context.beginPath();
  context.moveTo(0, centerY);
  // Top edge, left to right.
  for (let px = 0; px < width; px++) {
    context.lineTo(px, centerY - peakInColumn(px) * halfHeight);
  }
  // Mirrored bottom edge, right to left, closing the shape.
  for (let px = width - 1; px >= 0; px--) {
    context.lineTo(px, centerY + peakInColumn(px) * halfHeight);
  }
  context.closePath();
  context.fillStyle = color;
  context.fill();
}
|
|
50
|
+
/**
 * Strokes a per-point LUFS time series as a polyline across the canvas.
 *
 * Each pixel column is mapped (via rounding) to the nearest point in
 * `lufsData`; y-coordinates come from lufsToY. No-op when there are no
 * points.
 */
function drawLufsLine(lufsData, pointCount, color, dimensions, context) {
  const { width, height } = dimensions;
  if (pointCount === 0) return;
  const pointsPerPixel = (pointCount - 1) / width;
  context.beginPath();
  context.moveTo(0, lufsToY(lufsData[0], height));
  for (let column = 1; column < width; column++) {
    const sampleIndex = Math.min(Math.round(column * pointsPerPixel), pointCount - 1);
    context.lineTo(column, lufsToY(lufsData[sampleIndex], height));
  }
  context.strokeStyle = color;
  context.lineWidth = 1.5;
  context.stroke();
}
|
|
64
|
+
/**
 * Strokes a dashed horizontal marker at the given linear amplitude
 * (0..1, measured up from the vertical center). Used for the true-peak
 * overlay. Does nothing for non-positive amplitudes, and always restores
 * a solid dash pattern afterwards.
 */
function drawAmplitudeLine(amplitude, color, dimensions, context) {
  const { width, height } = dimensions;
  if (amplitude <= 0) return;
  const half = height / 2;
  const lineY = half - amplitude * half;
  context.beginPath();
  context.setLineDash([6, 4]);
  context.moveTo(0, lineY);
  context.lineTo(width, lineY);
  context.strokeStyle = color;
  context.lineWidth = 1.5;
  context.stroke();
  context.setLineDash([]);
}
|
|
77
|
+
/**
 * Strokes a dashed horizontal marker at the integrated LUFS level.
 * Skipped entirely for non-finite values or anything below the -60 LUFS
 * floor; the dash pattern is reset to solid afterwards.
 */
function drawIntegratedLine(integratedLufs, color, dimensions, context) {
  const { width, height } = dimensions;
  if (!isFinite(integratedLufs) || integratedLufs < -60) return;
  const lineY = lufsToY(integratedLufs, height);
  context.beginPath();
  context.setLineDash([6, 4]);
  context.moveTo(0, lineY);
  context.lineTo(width, lineY);
  context.strokeStyle = color;
  context.lineWidth = 1.5;
  context.stroke();
  context.setLineDash([]);
}
|
|
90
|
+
// React component that renders the selected loudness overlays (RMS envelope,
// momentary / short-term LUFS polylines, integrated-LUFS and true-peak
// markers) onto a 2D canvas. Re-draws whenever the compute result, canvas
// size, enabled layers, or any resolved color changes.
var LoudnessCanvas = ({ computeResult, rmsEnvelope = true, momentary = false, shortTerm = false, integrated = true, truePeak = false, colors }) => {
  const canvasRef = useRef(null);
  // Per-layer colors: caller overrides win, otherwise module defaults.
  const resolvedColors = {
    rms: colors?.rms ?? DEFAULT_COLORS.rms,
    momentary: colors?.momentary ?? DEFAULT_COLORS.momentary,
    shortTerm: colors?.shortTerm ?? DEFAULT_COLORS.shortTerm,
    integrated: colors?.integrated ?? DEFAULT_COLORS.integrated,
    truePeak: colors?.truePeak ?? DEFAULT_COLORS.truePeak
  };
  // assumes options.sampleQuery carries the target canvas pixel size — TODO confirm
  const dimensions = computeResult.status === "ready" ? computeResult.options.sampleQuery : { width: 0, height: 0 };
  const { width, height } = dimensions;
  useEffect(() => {
    const canvasElement = canvasRef.current;
    if (!canvasElement) return;
    const context = canvasElement.getContext("2d");
    if (!context) return;
    // Always clear first so disabling a layer erases its previous drawing.
    context.clearRect(0, 0, width, height);
    if (computeResult.status !== "ready") return;
    const { loudnessData } = computeResult;
    // Need at least two points to draw any line/envelope.
    if (!loudnessData || loudnessData.pointCount < 2) return;
    if (rmsEnvelope) {
      drawRmsEnvelope(loudnessData, resolvedColors.rms, dimensions, context);
    }
    if (momentary) {
      drawLufsLine(loudnessData.momentaryLufs, loudnessData.pointCount, resolvedColors.momentary, dimensions, context);
    }
    if (shortTerm) {
      drawLufsLine(loudnessData.shortTermLufs, loudnessData.pointCount, resolvedColors.shortTerm, dimensions, context);
    }
    if (integrated) {
      drawIntegratedLine(loudnessData.integratedLufs, resolvedColors.integrated, dimensions, context);
    }
    if (truePeak && loudnessData.truePeak !== void 0) {
      drawAmplitudeLine(loudnessData.truePeak, resolvedColors.truePeak, dimensions, context);
    }
    // Colors are listed individually (not the object) so the freshly-built
    // resolvedColors object doesn't retrigger the effect on every render.
  }, [computeResult, width, height, rmsEnvelope, momentary, shortTerm, integrated, truePeak, resolvedColors.rms, resolvedColors.momentary, resolvedColors.shortTerm, resolvedColors.integrated, resolvedColors.truePeak]);
  if (computeResult.status !== "ready" || !computeResult.loudnessData) return null;
  return /* @__PURE__ */ jsx(
    "canvas",
    {
      ref: canvasRef,
      width,
      height
    }
  );
};
|
|
136
|
+
|
|
137
|
+
// src/engine/shaders.ts
|
|
138
|
+
// WGSL compute shader: one workgroup computes one Hann-windowed radix-2 FFT
// frame (in workgroup shared memory), then writes either raw bin magnitudes
// (linear scale) or weighted band-mapped magnitudes into magnitude_output at
// row `chunk_offset + workgroup_id.x`.
// NOTE(review): the Hann window divides by (fft_size - 1), i.e. a symmetric
// window; spectral pipelines often use the periodic form (divide by
// fft_size) — confirm intent.
var FFT_PIPELINE_SHADER = (
  /* wgsl */
  `

override WORKGROUP_SIZE: u32;
override FFT_SIZE: u32;

struct Uniforms {
  fft_size: u32,
  chunk_offset: u32,
  num_bands: u32,
  use_band_mapping: u32,
  hop_size: u32,
}

@group(0) @binding(0) var<storage, read> input_samples: array<f32>;
@group(0) @binding(1) var<storage, read_write> magnitude_output: array<f32>;
@group(0) @binding(2) var<storage, read> band_mapping: array<f32>;
@group(0) @binding(3) var<uniform> uniforms: Uniforms;

var<workgroup> shared_re: array<f32, FFT_SIZE>;
var<workgroup> shared_im: array<f32, FFT_SIZE>;

fn bit_reverse(value: u32, bits: u32) -> u32 {
  var reversed: u32 = 0u;
  var remaining = value;
  for (var bit: u32 = 0u; bit < bits; bit = bit + 1u) {
    reversed = (reversed << 1u) | (remaining & 1u);
    remaining = remaining >> 1u;
  }
  return reversed;
}

@compute @workgroup_size(WORKGROUP_SIZE)
fn main(
  @builtin(workgroup_id) workgroup_id: vec3<u32>,
  @builtin(local_invocation_id) local_id: vec3<u32>,
) {
  let hop_index = workgroup_id.x;
  let thread_id = local_id.x;
  let fft_size = uniforms.fft_size;
  let half_fft = fft_size / 2u;
  let log2_fft = u32(log2(f32(fft_size)));
  let hop_size = uniforms.hop_size;
  let sample_base = hop_index * hop_size;

  // Load samples with bit-reversal permutation and Hann window
  let threads_per_load = WORKGROUP_SIZE;
  for (var offset: u32 = thread_id; offset < fft_size; offset = offset + threads_per_load) {
    let reversed = bit_reverse(offset, log2_fft);
    let sample_index = sample_base + offset;
    var sample_value: f32 = 0.0;
    if (sample_index < arrayLength(&input_samples)) {
      sample_value = input_samples[sample_index];
    }
    // Hann window
    let hann = 0.5 * (1.0 - cos(2.0 * 3.14159265358979323846 * f32(offset) / f32(fft_size - 1u)));
    shared_re[reversed] = sample_value * hann;
    shared_im[reversed] = 0.0;
  }

  workgroupBarrier();

  // Butterfly stages
  for (var stage: u32 = 0u; stage < log2_fft; stage = stage + 1u) {
    let block_size = 1u << (stage + 1u);
    let half_block = 1u << stage;

    for (var index: u32 = thread_id; index < half_fft; index = index + threads_per_load) {
      let block_index = index / half_block;
      let inner_index = index % half_block;
      let top = block_index * block_size + inner_index;
      let bottom = top + half_block;

      let angle = -2.0 * 3.14159265358979323846 * f32(inner_index) / f32(block_size);
      let twiddle_re = cos(angle);
      let twiddle_im = sin(angle);

      let bottom_re = shared_re[bottom] * twiddle_re - shared_im[bottom] * twiddle_im;
      let bottom_im = shared_re[bottom] * twiddle_im + shared_im[bottom] * twiddle_re;

      let top_re = shared_re[top];
      let top_im = shared_im[top];

      shared_re[top] = top_re + bottom_re;
      shared_im[top] = top_im + bottom_im;
      shared_re[bottom] = top_re - bottom_re;
      shared_im[bottom] = top_im - bottom_im;
    }

    workgroupBarrier();
  }

  // Compute magnitudes and write output
  let num_bins = half_fft + 1u;
  let scale_factor = 2.0 / f32(fft_size);
  let frame_index = uniforms.chunk_offset + hop_index;

  if (uniforms.use_band_mapping == 0u) {
    // Linear scale: write raw magnitudes
    for (var bin: u32 = thread_id; bin < num_bins; bin = bin + threads_per_load) {
      let re = shared_re[bin];
      let im = shared_im[bin];
      let magnitude = sqrt(re * re + im * im) * scale_factor;
      magnitude_output[frame_index * uniforms.num_bands + bin] = magnitude;
    }
  } else {
    // Non-linear scale: apply band mapping
    for (var band: u32 = thread_id; band < uniforms.num_bands; band = band + threads_per_load) {
      let mapping_offset = band * 4u;
      let bin_start = u32(band_mapping[mapping_offset]);
      let bin_end = u32(band_mapping[mapping_offset + 1u]);
      let weight_start = band_mapping[mapping_offset + 2u];
      let weight_end = band_mapping[mapping_offset + 3u];

      var accumulated: f32 = 0.0;
      for (var bin: u32 = bin_start; bin <= bin_end; bin = bin + 1u) {
        let re = shared_re[bin];
        let im = shared_im[bin];
        let magnitude = sqrt(re * re + im * im) * scale_factor;

        var weight: f32 = 1.0;
        if (bin == bin_start) {
          weight = weight_start;
        } else if (bin == bin_end) {
          weight = weight_end;
        }

        accumulated = accumulated + magnitude * weight;
      }

      var weight_sum: f32;
      if (bin_start == bin_end) {
        weight_sum = weight_start;
      } else {
        weight_sum = weight_start + weight_end + f32(bin_end - bin_start - 1u);
      }
      if (weight_sum > 0.0) {
        accumulated = accumulated / weight_sum;
      }
      magnitude_output[frame_index * uniforms.num_bands + band] = accumulated;
    }
  }
}
`
);
|
|
284
|
+
// WGSL compute shader: resamples the (frames x bands) magnitude buffer into an
// output_width x output_height rgba8unorm texture. Downsampling in time takes
// the per-column max; upsampling bilinearly interpolates between frames/bands.
// Magnitudes are converted to dB (WGSL log() is natural log, hence /log(10)),
// normalized into [db_min, db_max], and looked up in a 256-entry packed-RGBA
// colormap. The y-axis is flipped so band 0 lands at the bottom row.
var SPECTROGRAM_VISUALIZE_SHADER = (
  /* wgsl */
  `

struct Uniforms {
  total_frames: u32,
  num_bands: u32,
  output_width: u32,
  output_height: u32,
  db_min: f32,
  db_max: f32,
}

@group(0) @binding(0) var<storage, read> magnitude_buffer: array<f32>;
@group(0) @binding(1) var<storage, read> colormap_buffer: array<u32>;
@group(0) @binding(2) var output_texture: texture_storage_2d<rgba8unorm, write>;
@group(0) @binding(3) var<uniform> uniforms: Uniforms;

@compute @workgroup_size(64, 1)
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
  let column = global_id.x;
  let pixel_row = global_id.y;

  if (column >= uniforms.output_width || pixel_row >= uniforms.output_height) {
    return;
  }

  // Map pixel row to band with interpolation
  let band_float = f32(pixel_row) * f32(uniforms.num_bands - 1u) / f32(max(1u, uniforms.output_height - 1u));
  let band_low = u32(band_float);
  let band_high = min(band_low + 1u, uniforms.num_bands - 1u);
  let band_frac = band_float - f32(band_low);

  let stride = f32(uniforms.total_frames) / f32(uniforms.output_width);

  var max_magnitude: f32 = 0.0;

  if (stride >= 1.0) {
    let frame_start = u32(f32(column) * stride);
    let frame_end = min(u32(f32(column + 1u) * stride), uniforms.total_frames);

    var max_mag_low: f32 = 0.0;
    var max_mag_high: f32 = 0.0;

    for (var frame: u32 = frame_start; frame < frame_end; frame = frame + 1u) {
      max_mag_low = max(max_mag_low, magnitude_buffer[frame * uniforms.num_bands + band_low]);
      max_mag_high = max(max_mag_high, magnitude_buffer[frame * uniforms.num_bands + band_high]);
    }

    max_magnitude = max_mag_low * (1.0 - band_frac) + max_mag_high * band_frac;
  } else {
    let exact_frame = f32(column) * stride;
    let frame_low = min(u32(exact_frame), uniforms.total_frames - 1u);
    let frame_high = min(frame_low + 1u, uniforms.total_frames - 1u);
    let frame_frac = exact_frame - f32(frame_low);

    let mag_ll = magnitude_buffer[frame_low * uniforms.num_bands + band_low];
    let mag_lh = magnitude_buffer[frame_low * uniforms.num_bands + band_high];
    let mag_hl = magnitude_buffer[frame_high * uniforms.num_bands + band_low];
    let mag_hh = magnitude_buffer[frame_high * uniforms.num_bands + band_high];

    let mag_low_interp = mag_ll * (1.0 - frame_frac) + mag_hl * frame_frac;
    let mag_high_interp = mag_lh * (1.0 - frame_frac) + mag_hh * frame_frac;

    max_magnitude = mag_low_interp * (1.0 - band_frac) + mag_high_interp * band_frac;
  }

  // Convert to dB
  let db_value = 20.0 * log(max(max_magnitude, 1e-10)) / log(10.0);
  let db_range = uniforms.db_max - uniforms.db_min;
  let normalized = clamp((db_value - uniforms.db_min) / db_range, 0.0, 1.0);

  // Index into colormap (256 entries)
  let colormap_index = u32(normalized * 255.0);
  let packed = colormap_buffer[colormap_index];
  let red = f32(packed & 0xFFu) / 255.0;
  let green = f32((packed >> 8u) & 0xFFu) / 255.0;
  let blue = f32((packed >> 16u) & 0xFFu) / 255.0;
  let alpha = f32((packed >> 24u) & 0xFFu) / 255.0;

  let pixel_y = uniforms.output_height - 1u - pixel_row;
  textureStore(output_texture, vec2<i32>(i32(column), i32(pixel_y)), vec4<f32>(red, green, blue, alpha));
}
`
);
|
|
369
|
+
// WGSL compute shader: draws a min/max waveform. One invocation paints one
// pixel column: it takes the min/max over the interleaved (min, max) pairs in
// waveform_buffer that map to the column, converts to a vertical pixel span
// around the center line, and fills that span with the configured color
// (everything else transparent).
var WAVEFORM_VISUALIZE_SHADER = (
  /* wgsl */
  `

struct Uniforms {
  total_points: u32,
  output_width: u32,
  output_height: u32,
  waveform_color_r: f32,
  waveform_color_g: f32,
  waveform_color_b: f32,
}

@group(0) @binding(0) var<storage, read> waveform_buffer: array<f32>;
@group(0) @binding(1) var output_texture: texture_storage_2d<rgba8unorm, write>;
@group(0) @binding(2) var<uniform> uniforms: Uniforms;

@compute @workgroup_size(64)
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
  let column = global_id.x;

  if (column >= uniforms.output_width) {
    return;
  }

  let stride = f32(uniforms.total_points) / f32(uniforms.output_width);
  let point_start = min(u32(f32(column) * stride), uniforms.total_points - 1u);
  let point_end = max(point_start + 1u, min(u32(f32(column + 1u) * stride), uniforms.total_points));

  var min_val: f32 = 1.0;
  var max_val: f32 = -1.0;

  for (var point: u32 = point_start; point < point_end; point = point + 1u) {
    let min_sample = waveform_buffer[point * 2u];
    let max_sample = waveform_buffer[point * 2u + 1u];
    min_val = min(min_val, min_sample);
    max_val = max(max_val, max_sample);
  }

  let half_height = f32(uniforms.output_height) / 2.0;

  let y_min_pixel = u32(clamp(half_height - max_val * half_height, 0.0, f32(uniforms.output_height - 1u)));
  let y_max_pixel = u32(clamp(half_height - min_val * half_height, 0.0, f32(uniforms.output_height - 1u)));

  let color = vec4<f32>(
    uniforms.waveform_color_r / 255.0,
    uniforms.waveform_color_g / 255.0,
    uniforms.waveform_color_b / 255.0,
    1.0,
  );
  let transparent = vec4<f32>(0.0, 0.0, 0.0, 0.0);

  for (var pixel_y: u32 = 0u; pixel_y < uniforms.output_height; pixel_y = pixel_y + 1u) {
    if (pixel_y >= y_min_pixel && pixel_y <= y_max_pixel) {
      textureStore(output_texture, vec2<i32>(i32(column), i32(pixel_y)), color);
    } else {
      textureStore(output_texture, vec2<i32>(i32(column), i32(pixel_y)), transparent);
    }
  }
}
`
);
|
|
431
|
+
// WGSL vertex shader: emits a fullscreen quad as two triangles straight from
// vertex_index (no vertex buffer). UVs are V-flipped relative to clip space
// so the sampled texture appears upright on the canvas.
var BLIT_VERTEX_SHADER = (
  /* wgsl */
  `

struct VertexOutput {
  @builtin(position) position: vec4<f32>,
  @location(0) uv: vec2<f32>,
}

@vertex
fn main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
  // Fullscreen quad from two triangles using vertex_index
  var positions = array<vec2<f32>, 6>(
    vec2<f32>(-1.0, -1.0),
    vec2<f32>(1.0, -1.0),
    vec2<f32>(-1.0, 1.0),
    vec2<f32>(-1.0, 1.0),
    vec2<f32>(1.0, -1.0),
    vec2<f32>(1.0, 1.0),
  );

  var uvs = array<vec2<f32>, 6>(
    vec2<f32>(0.0, 1.0),
    vec2<f32>(1.0, 1.0),
    vec2<f32>(0.0, 0.0),
    vec2<f32>(0.0, 0.0),
    vec2<f32>(1.0, 1.0),
    vec2<f32>(1.0, 0.0),
  );

  var output: VertexOutput;
  output.position = vec4<f32>(positions[vertex_index], 0.0, 1.0);
  output.uv = uvs[vertex_index];
  return output;
}
`
);
|
|
468
|
+
// WGSL fragment shader: samples the source texture at the interpolated UV —
// the trivial half of the fullscreen blit pass.
var BLIT_FRAGMENT_SHADER = (
  /* wgsl */
  `

@group(0) @binding(0) var source_texture: texture_2d<f32>;
@group(0) @binding(1) var source_sampler: sampler;

@fragment
fn main(@location(0) uv: vec2<f32>) -> @location(0) vec4<f32> {
  return textureSample(source_texture, source_sampler, uv);
}
`
);
|
|
481
|
+
|
|
482
|
+
// src/engine/blit.ts
|
|
483
|
+
// Copies a GPU texture onto a WebGPU canvas by drawing a fullscreen quad.
// Owns the canvas context configuration, a linear sampler, and the render
// pipeline; call destroy() to release the canvas context.
var BlitRenderer = class {
  // Configures the given canvas for WebGPU output and builds the blit
  // pipeline. Throws if the canvas cannot provide a "webgpu" context.
  constructor(device, canvas) {
    this.device = device;
    const context = canvas.getContext("webgpu");
    if (!context) {
      throw new Error("Failed to get WebGPU canvas context");
    }
    this.context = context;
    this.canvasFormat = navigator.gpu.getPreferredCanvasFormat();
    this.context.configure({
      device: this.device,
      format: this.canvasFormat,
      alphaMode: "premultiplied"
    });
    // Linear filtering so the source texture scales smoothly to canvas size.
    this.sampler = device.createSampler({
      magFilter: "linear",
      minFilter: "linear"
    });
    const vertexModule = device.createShaderModule({ code: BLIT_VERTEX_SHADER });
    const fragmentModule = device.createShaderModule({ code: BLIT_FRAGMENT_SHADER });
    this.pipeline = device.createRenderPipeline({
      layout: "auto",
      vertex: {
        module: vertexModule,
        entryPoint: "main"
      },
      fragment: {
        module: fragmentModule,
        entryPoint: "main",
        targets: [{ format: this.canvasFormat }]
      },
      primitive: {
        topology: "triangle-list"
      }
    });
  }
  // Draws `texture` over the whole canvas (cleared to opaque black first)
  // and submits the command buffer. A fresh bind group is created per call
  // because the source texture may change between frames.
  render(texture) {
    const textureView = texture.createView();
    const bindGroup = this.device.createBindGroup({
      layout: this.pipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: textureView },
        { binding: 1, resource: this.sampler }
      ]
    });
    const commandEncoder = this.device.createCommandEncoder();
    const renderPass = commandEncoder.beginRenderPass({
      colorAttachments: [
        {
          view: this.context.getCurrentTexture().createView(),
          clearValue: { r: 0, g: 0, b: 0, a: 1 },
          loadOp: "clear",
          storeOp: "store"
        }
      ]
    });
    renderPass.setPipeline(this.pipeline);
    renderPass.setBindGroup(0, bindGroup);
    renderPass.draw(6); // two triangles = fullscreen quad
    renderPass.end();
    this.device.queue.submit([commandEncoder.finish()]);
  }
  // Resizes the backing canvas and re-configures the context (required after
  // changing canvas dimensions).
  resize(width, height) {
    const canvas = this.context.canvas;
    canvas.width = width;
    canvas.height = height;
    this.context.configure({
      device: this.device,
      format: this.canvasFormat,
      alphaMode: "premultiplied"
    });
  }
  // Releases the canvas context association; the GPUDevice itself is not
  // owned by this class and is left untouched.
  destroy() {
    this.context.unconfigure();
  }
};
|
|
559
|
+
// Hook that merges an internal canvas ref with an optional external ref
// (callback or object form). Returns [internalRef, callbackRef]: attach
// `callbackRef` to the element; read the node from `internalRef.current`.
// The external ref is kept in a ref so the callback can stay stable
// (empty useCallback deps) while still forwarding to the latest value.
function useCanvasRef(ref) {
  const internalCanvasReference = useRef(null);
  const externalRefRef = useRef(ref);
  externalRefRef.current = ref;
  const canvasCallback = useCallback((canvas) => {
    internalCanvasReference.current = canvas;
    const externalRef = externalRefRef.current;
    if (typeof externalRef === "function") {
      externalRef(canvas);
    } else if (externalRef) {
      externalRef.current = canvas;
    }
  }, []);
  return [internalCanvasReference, canvasCallback];
}
|
|
574
|
+
// React component that blits the pre-computed spectrogram texture to a
// WebGPU canvas whenever the compute result changes. The BlitRenderer is
// created lazily on first ready result and recreated if the GPUDevice
// instance changes; it is destroyed on unmount.
var SpectrogramCanvas = ({ computeResult, ref }) => {
  const [internalCanvasReference, canvasCallback] = useCanvasRef(ref);
  const blitReference = useRef(null);
  // Device the current BlitRenderer was built against, to detect device swaps.
  const blitDeviceRef = useRef(null);
  useEffect(() => {
    const canvas = internalCanvasReference.current;
    if (!canvas || computeResult.status !== "ready" || !computeResult.spectrogramTexture) {
      return;
    }
    const { device } = computeResult.options.config;
    const { width: width2, height: height2 } = computeResult.options.sampleQuery;
    // A renderer bound to a stale device must be torn down before reuse.
    if (blitReference.current && blitDeviceRef.current !== device) {
      blitReference.current.destroy();
      blitReference.current = null;
    }
    blitReference.current ?? (blitReference.current = new BlitRenderer(device, canvas));
    blitDeviceRef.current = device;
    blitReference.current.resize(width2, height2);
    blitReference.current.render(computeResult.spectrogramTexture);
  }, [computeResult]);
  // Unmount-only cleanup of the renderer.
  useEffect(
    () => () => {
      blitReference.current?.destroy();
      blitReference.current = null;
    },
    []
  );
  const { width, height } = computeResult.status === "ready" ? computeResult.options.sampleQuery : { width: 0, height: 0 };
  return /* @__PURE__ */ jsx(
    "canvas",
    {
      ref: canvasCallback,
      width,
      height
    }
  );
};
|
|
611
|
+
|
|
612
|
+
// src/engine/band-mapping.ts
|
|
613
|
+
/** Converts a frequency in Hz to the mel scale (O'Shaughnessy formula). */
function freqToMel(frequency) {
  return 2595 * Math.log10(frequency / 700 + 1);
}
|
|
616
|
+
/** Inverse of freqToMel: converts a mel value back to a frequency in Hz. */
function melToFreq(mel) {
  return 700 * (10 ** (mel / 2595) - 1);
}
|
|
619
|
+
/** Converts a frequency in Hz to the ERB-rate scale (Glasberg & Moore). */
function freqToErb(frequency) {
  return 21.4 * Math.log10(0.00437 * frequency + 1);
}
|
|
622
|
+
/** Inverse of freqToErb: converts an ERB-rate value back to Hz. */
function erbToFreq(erb) {
  return (10 ** (erb / 21.4) - 1) / 0.00437;
}
|
|
625
|
+
/**
 * Splits [minFreq, maxFreq] into `numBands` equal intervals on an arbitrary
 * perceptual scale (via toScale/fromScale), and maps each interval back to a
 * span of FFT bins with fractional edge weights.
 *
 * @param {number} numBands - number of output bands
 * @param {number} minFreq - lower frequency bound in Hz
 * @param {number} maxFreq - upper frequency bound in Hz
 * @param {number} sampleRate - audio sample rate in Hz
 * @param {number} fftSize - FFT length (bin width = sampleRate / fftSize)
 * @param {(hz: number) => number} toScale - Hz -> scale units
 * @param {(s: number) => number} fromScale - scale units -> Hz
 * @returns {{binStart: number, binEnd: number, weightStart: number, weightEnd: number}[]}
 */
function computeScaledBandMappings(numBands, minFreq, maxFreq, sampleRate, fftSize, toScale, fromScale) {
  const scaleMin = toScale(minFreq);
  const scaleStep = (toScale(maxFreq) - scaleMin) / numBands;
  const binWidth = sampleRate / fftSize;
  const lastBin = fftSize / 2; // index of the highest of fftSize/2 + 1 linear bins
  const clamp01 = (value) => Math.max(0, Math.min(1, value));
  return Array.from({ length: numBands }, (_, band) => {
    const exactBinLow = fromScale(scaleMin + band * scaleStep) / binWidth;
    const exactBinHigh = fromScale(scaleMin + (band + 1) * scaleStep) / binWidth;
    const binStart = Math.max(0, Math.floor(exactBinLow));
    const rawBinEnd = Math.min(lastBin, Math.ceil(exactBinHigh));
    return {
      binStart,
      // Guard against inverted spans for very narrow bands.
      binEnd: Math.max(binStart, rawBinEnd),
      weightStart: clamp01(1 - (exactBinLow - binStart)),
      weightEnd: clamp01(1 - (rawBinEnd - exactBinHigh))
    };
  });
}
|
|
650
|
+
/**
 * Builds the flat Float32Array band-mapping table consumed by the FFT shader:
 * four floats per band — [binStart, binEnd, weightStart, weightEnd].
 * Returns an empty array for the linear scale (no mapping needed). Bands
 * cover 20 Hz up to Nyquist on the requested scale ("log", "mel", or ERB
 * as the fallback).
 */
function computeBandMappings(scale, numBands, sampleRate, fftSize) {
  if (scale === "linear") {
    return new Float32Array(0);
  }
  const minFreq = 20;
  const maxFreq = sampleRate / 2;
  let toScale;
  let fromScale;
  if (scale === "log") {
    toScale = Math.log;
    fromScale = Math.exp;
  } else if (scale === "mel") {
    toScale = freqToMel;
    fromScale = melToFreq;
  } else {
    toScale = freqToErb;
    fromScale = erbToFreq;
  }
  const mappings = computeScaledBandMappings(
    numBands,
    minFreq,
    maxFreq,
    sampleRate,
    fftSize,
    toScale,
    fromScale
  );
  const result = new Float32Array(mappings.length * 4);
  mappings.forEach((mapping, index) => {
    if (!mapping) return;
    const offset = index * 4;
    result[offset] = mapping.binStart;
    result[offset + 1] = mapping.binEnd;
    result[offset + 2] = mapping.weightStart;
    result[offset + 3] = mapping.weightEnd;
  });
  return result;
}
|
|
702
|
+
|
|
703
|
+
// src/utils/lava.ts
|
|
704
|
+
// RGB control points for the "lava" colormap: black through deep blues,
// reds and oranges, up to white.
var controlPoints = [
  [0, 0, 0],
  [5, 5, 30],
  [15, 20, 70],
  [30, 15, 50],
  [80, 10, 5],
  [140, 20, 0],
  [185, 55, 0],
  [215, 100, 5],
  [240, 155, 25],
  [252, 210, 70],
  [255, 240, 140],
  [255, 255, 255]
];
// Colormap definition: control points evenly distributed over [0, 1].
var lavaColormap = {
  colors: controlPoints.map((color, index) => ({
    position: index / (controlPoints.length - 1),
    color
  }))
};
|
|
724
|
+
|
|
725
|
+
// src/utils/viridis.ts
|
|
726
|
+
// RGB control points approximating matplotlib's "viridis" colormap
// (dark purple through teal and green to yellow).
var controlPoints2 = [
  [68, 1, 84],
  [72, 35, 116],
  [64, 68, 135],
  [52, 96, 141],
  [33, 137, 136],
  [26, 158, 123],
  [42, 182, 91],
  [118, 191, 47],
  [168, 186, 35],
  [208, 200, 29],
  [240, 218, 28],
  [253, 231, 37]
];
// Colormap definition: control points evenly distributed over [0, 1].
var viridisColormap = {
  colors: controlPoints2.map((color, index) => ({
    position: index / (controlPoints2.length - 1),
    color
  }))
};
|
|
746
|
+
|
|
747
|
+
// src/engine/colormap.ts
|
|
748
|
+
/**
 * Expands a colormap definition into a 256-entry RGBA8 lookup table
 * (Uint8Array of 1024 bytes) by linearly interpolating between the two
 * control points that bracket each position. Alpha is always 255.
 */
function generateColormapBuffer(definition) {
  const buffer = new Uint8Array(256 * 4);
  const { colors } = definition;
  for (let index = 0; index < 256; index++) {
    const position = index / 255;
    // Last control point (excluding the final one) at or below `position`.
    let lowerIndex = 0;
    for (let ci = colors.length - 2; ci >= 0; ci--) {
      const entry = colors[ci];
      if (entry && entry.position <= position) {
        lowerIndex = ci;
        break;
      }
    }
    const lower = colors[lowerIndex];
    const upper = colors[Math.min(lowerIndex + 1, colors.length - 1)];
    if (!lower || !upper) {
      continue;
    }
    const span = upper.position - lower.position;
    const t = span === 0 ? 0 : Math.max(0, Math.min(1, (position - lower.position) / span));
    const offset = index * 4;
    for (let channel = 0; channel < 3; channel++) {
      buffer[offset + channel] = Math.round(
        lower.color[channel] + (upper.color[channel] - lower.color[channel]) * t
      );
    }
    buffer[offset + 3] = 255;
  }
  return buffer;
}
|
|
776
|
+
/**
 * Resolves a colormap argument to a concrete colormap definition.
 * The preset names "lava" and "viridis" map to the built-in palettes;
 * any other value is returned unchanged (assumed to already be a
 * colormap definition object).
 */
function resolveColormap(colormap) {
  switch (colormap) {
    case "lava":
      return lavaColormap;
    case "viridis":
      return viridisColormap;
    default:
      return colormap;
  }
}
|
|
785
|
+
/**
 * Picks the waveform overlay RGB color. An explicit override wins;
 * otherwise a per-palette default is used, falling back to light gray.
 * @returns [r, g, b] triple (0-255 components).
 */
function resolveWaveformColor(colormap, override) {
  if (override) return override;
  switch (colormap) {
    case "lava":
      return [40, 135, 180];
    case "viridis":
      return [180, 115, 42];
    default:
      return [200, 200, 200];
  }
}
|
|
797
|
+
|
|
798
|
+
// src/engine/device.ts
|
|
799
|
+
// In-flight (or resolved) device request, cached so the module creates at
// most one default device. Reset when the device is lost or creation fails.
var cachedDevicePromise = null;
/**
 * Returns a GPUDevice: either the caller-provided one (with a loss logger
 * attached and its limits checked) or a lazily created, module-cached
 * default device.
 * @param provided Optional externally owned GPUDevice to use as-is.
 */
async function getDevice(provided) {
  if (provided) {
    void provided.lost.then((info) => {
      console.error(`WebGPU device was lost: ${info.message}`);
    });
    checkSharedMemoryLimit(provided);
    return provided;
  }
  if (cachedDevicePromise) return cachedDevicePromise;
  cachedDevicePromise = createDevice();
  // Fix: if creation rejects (no WebGPU support, no adapter), drop the cached
  // rejected promise so a later call can retry instead of failing forever.
  cachedDevicePromise.catch(() => {
    cachedDevicePromise = null;
  });
  return cachedDevicePromise;
}
|
|
812
|
+
/**
 * Creates a fresh GPUDevice, requesting the adapter's full workgroup
 * storage limit so FFT sizes can be as large as the hardware allows.
 * Logs device loss and clears the module-level cache when it happens.
 * @throws Error when WebGPU or an adapter is unavailable.
 */
async function createDevice() {
  if (!navigator.gpu) {
    throw new Error("WebGPU is not supported in this browser");
  }
  const adapter = await navigator.gpu.requestAdapter();
  if (!adapter) {
    throw new Error("Failed to obtain WebGPU adapter");
  }
  const requiredLimits = {
    maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize
  };
  const device = await adapter.requestDevice({ requiredLimits });
  // Promise intentionally not awaited: log loss whenever it happens and let
  // the next getDevice() call rebuild the device.
  void device.lost.then((info) => {
    console.error(`WebGPU device was lost: ${info.message}`);
    cachedDevicePromise = null;
  });
  checkSharedMemoryLimit(device);
  return device;
}
|
|
833
|
+
/**
 * Largest power-of-two FFT size the device's workgroup storage can hold,
 * budgeting two 4-byte f32 values per FFT sample.
 */
function getMaxFftSize(device) {
  const bytesAvailable = device.limits.maxComputeWorkgroupStorageSize;
  const rawMax = Math.floor(bytesAvailable / (2 * 4));
  // Round down to the nearest power of two.
  return Math.pow(2, Math.floor(Math.log2(rawMax)));
}
|
|
838
|
+
/**
 * Warns when the device's workgroup storage is below 32 KiB, i.e. when
 * FFT sizes will be clamped below the library's usual defaults.
 */
function checkSharedMemoryLimit(device) {
  const maxStorage = device.limits.maxComputeWorkgroupStorageSize;
  const maxFft = getMaxFftSize(device);
  if (maxStorage >= 32768) {
    return;
  }
  console.warn(
    `WebGPU device shared memory is ${maxStorage} bytes. FFT size will be clamped to ${maxFft} (max supported by this device).`
  );
}
|
|
847
|
+
|
|
848
|
+
// src/engine/SpectralEngine.ts
|
|
849
|
+
// Number of output frequency bands when using a non-linear frequency scale
// (linear scale uses fftSize / 2 + 1 bands instead — see prepare()).
var DEFAULT_NON_LINEAR_BANDS = 512;
// STFT windows overlap by this factor: hop = floor(fftSize / 4).
var HOP_OVERLAP_FACTOR = 4;
// Cap on how many samples one GPU input-buffer upload batch may contain.
var MAX_INPUT_BUFFER_SAMPLES = 131072;
|
|
852
|
+
/**
 * Surfaces WGSL compile errors eagerly: inspects a shader module's
 * compilation info and throws with all error messages joined, so failures
 * carry a readable label instead of a silent broken pipeline.
 * @throws Error listing every compilation error for the module.
 */
async function checkShaderCompilation(module, label) {
  const { messages } = await module.getCompilationInfo();
  const failures = [];
  for (const message of messages) {
    if (message.type === "error") failures.push(message.message);
  }
  if (failures.length === 0) return;
  throw new Error(`Shader compilation failed (${label}):\n${failures.join("\n")}`);
}
|
|
861
|
+
/**
 * Fills in defaults for every optional engine-config field and resolves
 * the colormap name to a concrete definition plus a waveform color.
 * The input object is not mutated; unknown fields pass through untouched.
 */
function resolveConfig(config) {
  const paletteName = config.colormap ?? "lava";
  return {
    ...config,
    fftSize: config.fftSize ?? 4096,
    frequencyScale: config.frequencyScale ?? "log",
    dbRange: config.dbRange ?? [-120, 0],
    colormap: resolveColormap(paletteName),
    waveformColor: resolveWaveformColor(paletteName, config.waveformColor),
    spectrogram: config.spectrogram ?? true,
    loudness: config.loudness ?? true,
    truePeak: config.truePeak ?? true
  };
}
|
|
877
|
+
// Cache key for compiled pipelines, unique per (fftSize, frequencyScale) pair.
function makeCacheKey(fftSize, frequencyScale) {
  return [fftSize, frequencyScale].join(":");
}
|
|
880
|
+
// GPU-backed STFT engine: prepare() allocates buffers/pipelines for a
// fixed-length segment, submitChunk() streams audio through an FFT compute
// pass, and finalize() renders the accumulated magnitudes to a texture.
var SpectralEngine = class {
  /** @param device GPUDevice used for every buffer, pipeline, and submit. */
  constructor(device) {
    // Compiled pipelines keyed by makeCacheKey(fftSize, frequencyScale).
    this.pipelineCache = /* @__PURE__ */ new Map();
    this.device = device;
  }
  /**
   * Allocates all GPU resources and streaming state for one analysis run.
   * Clamps the requested FFT size to the device limit, derives hop/frame
   * counts, uploads the band mapping and packed colormap, and returns the
   * mutable context threaded through submitChunk()/finalize().
   * @throws Error when the segment is shorter than one FFT window.
   */
  async prepare(sampleCount, sampleRate, config) {
    const { fftSize: requestedFftSize, frequencyScale } = config;
    const maxFft = getMaxFftSize(this.device);
    const fftSize = Math.min(requestedFftSize, maxFft);
    if (sampleCount < fftSize) {
      throw new Error(`Audio segment too short for FFT size ${fftSize} \u2014 need at least ${fftSize} samples, got ${sampleCount}`);
    }
    const isLinear = frequencyScale === "linear";
    // Linear scale keeps every FFT bin; other scales resample to a fixed band count.
    const numBands = isLinear ? fftSize / 2 + 1 : DEFAULT_NON_LINEAR_BANDS;
    const hopSize = Math.max(1, Math.floor(fftSize / HOP_OVERLAP_FACTOR));
    const totalFrames = Math.floor((sampleCount - fftSize) / hopSize) + 1;
    const bandMappingData = computeBandMappings(frequencyScale, numBands, sampleRate, fftSize);
    const resolvedColormap = typeof config.colormap === "string" ? resolveColormap(config.colormap) : config.colormap;
    const colormapData = generateColormapBuffer(resolvedColormap);
    const pipelines = await this.getOrCreatePipelines(fftSize, frequencyScale);
    // One f32 magnitude per (frame, band) cell.
    const magnitudeBuffer = this.device.createBuffer({
      size: totalFrames * numBands * 4,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC
    });
    // Minimum size 16 guards against zero-length storage bindings.
    const bandMappingBuffer = this.device.createBuffer({
      size: Math.max(16, bandMappingData.byteLength),
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
      mappedAtCreation: true
    });
    new Float32Array(bandMappingBuffer.getMappedRange(0, bandMappingData.byteLength)).set(bandMappingData);
    bandMappingBuffer.unmap();
    // Pack each RGBA colormap entry into one u32 (r in the low byte).
    const colormapPackedData = new Uint32Array(256);
    for (let index = 0; index < 256; index++) {
      const offset = index * 4;
      const red = colormapData[offset] ?? 0;
      const green = colormapData[offset + 1] ?? 0;
      const blue = colormapData[offset + 2] ?? 0;
      const alpha = colormapData[offset + 3] ?? 0;
      colormapPackedData[index] = red | green << 8 | blue << 16 | alpha << 24;
    }
    const colormapBuffer = this.device.createBuffer({
      size: colormapPackedData.byteLength,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
      mappedAtCreation: true
    });
    new Uint32Array(colormapBuffer.getMappedRange()).set(colormapPackedData);
    colormapBuffer.unmap();
    // Input batch buffer: one max-size batch plus an extra FFT window of slack.
    const inputBuffer = this.device.createBuffer({
      size: (MAX_INPUT_BUFFER_SAMPLES + fftSize) * 4,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST
    });
    const uniformBuffer = this.device.createBuffer({
      size: 32,
      usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
    });
    return {
      fftSize,
      hopSize,
      totalFrames,
      // Carries the tail samples of a chunk that don't yet fill a window.
      overlapBuffer: new Float32Array(fftSize),
      overlapCount: 0,
      // Number of STFT frames dispatched so far.
      hopOffset: 0,
      magnitudeBuffer,
      bandMappingBuffer,
      colormapBuffer,
      inputBuffer,
      uniformBuffer,
      pipelines,
      numBands,
      isLinear
    };
  }
  /**
   * Streams one chunk of mono samples through the FFT pipeline.
   * Prepends the previous chunk's unconsumed tail, dispatches as many
   * hop-aligned batches as fit in the input buffer, and saves the new
   * tail back into the context's overlap buffer.
   */
  submitChunk(monoSamples, chunkLength, context) {
    const { fftSize, hopSize, totalFrames, overlapBuffer } = context;
    const totalSamples = context.overlapCount + chunkLength;
    const combined = new Float32Array(totalSamples);
    if (context.overlapCount > 0) {
      combined.set(overlapBuffer.subarray(0, context.overlapCount));
    }
    combined.set(monoSamples.subarray(0, chunkLength), context.overlapCount);
    let localOffset = 0;
    while (localOffset + fftSize <= totalSamples && context.hopOffset < totalFrames) {
      const maxHops = Math.floor((totalSamples - localOffset - fftSize) / hopSize) + 1;
      const remainingHops = totalFrames - context.hopOffset;
      // Batch size is bounded by the data available, frames left, and the GPU input buffer.
      const hopsInBatch = Math.min(maxHops, remainingHops, Math.floor((MAX_INPUT_BUFFER_SAMPLES - fftSize) / hopSize) + 1);
      if (hopsInBatch <= 0) break;
      const batchSamples = (hopsInBatch - 1) * hopSize + fftSize;
      const batchData = combined.subarray(localOffset, localOffset + batchSamples);
      this.device.queue.writeBuffer(context.inputBuffer, 0, batchData.buffer, batchData.byteOffset, batchData.byteLength);
      // Uniforms: fftSize, global frame offset, band count, scale flag, hop size.
      const uniformData = new Uint32Array(8);
      uniformData[0] = fftSize;
      uniformData[1] = context.hopOffset;
      uniformData[2] = context.numBands;
      uniformData[3] = context.isLinear ? 0 : 1;
      uniformData[4] = hopSize;
      this.device.queue.writeBuffer(context.uniformBuffer, 0, uniformData);
      const fftBindGroup = this.device.createBindGroup({
        layout: context.pipelines.fftPipeline.getBindGroupLayout(0),
        entries: [
          { binding: 0, resource: { buffer: context.inputBuffer } },
          { binding: 1, resource: { buffer: context.magnitudeBuffer } },
          { binding: 2, resource: { buffer: context.bandMappingBuffer } },
          { binding: 3, resource: { buffer: context.uniformBuffer } }
        ]
      });
      const commandEncoder = this.device.createCommandEncoder();
      const computePass = commandEncoder.beginComputePass();
      computePass.setPipeline(context.pipelines.fftPipeline);
      computePass.setBindGroup(0, fftBindGroup);
      // One workgroup per STFT frame in the batch.
      computePass.dispatchWorkgroups(hopsInBatch);
      computePass.end();
      this.device.queue.submit([commandEncoder.finish()]);
      localOffset += hopsInBatch * hopSize;
      context.hopOffset += hopsInBatch;
    }
    const consumed = localOffset;
    const remaining = totalSamples - consumed;
    if (remaining > 0) {
      overlapBuffer.set(combined.subarray(consumed, consumed + remaining));
    }
    context.overlapCount = remaining;
  }
  /**
   * Renders the accumulated magnitude buffer into an RGBA spectrogram
   * texture of the requested dimensions, then destroys all per-run GPU
   * buffers. Returns { spectrogramTexture, width, height }.
   */
  finalize(dimensions, context, config) {
    const { width, height } = dimensions;
    const spectrogramTexture = this.device.createTexture({
      size: dimensions,
      format: "rgba8unorm",
      usage: GPUTextureUsage.STORAGE_BINDING | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC
    });
    // Six 4-byte uniform fields: frames, bands, width, height, dbMin, dbMax.
    const spectrogramUniformData = new ArrayBuffer(24);
    const spectrogramUniforms = new DataView(spectrogramUniformData);
    spectrogramUniforms.setUint32(0, context.totalFrames, true);
    spectrogramUniforms.setUint32(4, context.numBands, true);
    spectrogramUniforms.setUint32(8, width, true);
    spectrogramUniforms.setUint32(12, height, true);
    spectrogramUniforms.setFloat32(16, config.dbRange[0], true);
    spectrogramUniforms.setFloat32(20, config.dbRange[1], true);
    const spectrogramUniformBuffer = this.device.createBuffer({
      size: 24,
      usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
      mappedAtCreation: true
    });
    new Uint8Array(spectrogramUniformBuffer.getMappedRange()).set(new Uint8Array(spectrogramUniformData));
    spectrogramUniformBuffer.unmap();
    const spectrogramBindGroup = this.device.createBindGroup({
      layout: context.pipelines.spectrogramPipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: { buffer: context.magnitudeBuffer } },
        { binding: 1, resource: { buffer: context.colormapBuffer } },
        { binding: 2, resource: spectrogramTexture.createView() },
        { binding: 3, resource: { buffer: spectrogramUniformBuffer } }
      ]
    });
    const commandEncoder = this.device.createCommandEncoder();
    const computePass = commandEncoder.beginComputePass();
    computePass.setPipeline(context.pipelines.spectrogramPipeline);
    computePass.setBindGroup(0, spectrogramBindGroup);
    // Workgroup covers 64 pixels of width; one row of workgroups per y line.
    computePass.dispatchWorkgroups(Math.ceil(width / 64), height);
    computePass.end();
    this.device.queue.submit([commandEncoder.finish()]);
    // Safe to destroy after submit: the queue holds its own reference.
    spectrogramUniformBuffer.destroy();
    this.cleanupContext(context);
    return { spectrogramTexture, width, height };
  }
  /** Destroys every per-run buffer created by prepare(). */
  cleanupContext(context) {
    context.magnitudeBuffer.destroy();
    context.bandMappingBuffer.destroy();
    context.colormapBuffer.destroy();
    context.inputBuffer.destroy();
    context.uniformBuffer.destroy();
  }
  /** Drops cached pipelines; does not destroy the (shared) device. */
  destroy() {
    this.pipelineCache.clear();
  }
  /**
   * Compiles (or fetches from cache) the FFT and spectrogram compute
   * pipelines for this (fftSize, frequencyScale) combination, checking
   * shader compilation eagerly so errors carry readable labels.
   */
  async getOrCreatePipelines(fftSize, frequencyScale) {
    const cacheString = makeCacheKey(fftSize, frequencyScale);
    const cached = this.pipelineCache.get(cacheString);
    if (cached) {
      return cached;
    }
    const workgroupSize = Math.min(fftSize / 2, 256);
    const fftModule = this.device.createShaderModule({ code: FFT_PIPELINE_SHADER });
    const spectrogramModule = this.device.createShaderModule({ code: SPECTROGRAM_VISUALIZE_SHADER });
    await checkShaderCompilation(fftModule, "FFT pipeline");
    await checkShaderCompilation(spectrogramModule, "spectrogram visualization");
    const fftPipeline = this.device.createComputePipeline({
      layout: "auto",
      compute: {
        module: fftModule,
        entryPoint: "main",
        // Pipeline-overridable constants specialize the shader per FFT size.
        constants: {
          WORKGROUP_SIZE: workgroupSize,
          FFT_SIZE: fftSize
        }
      }
    });
    const spectrogramPipeline = this.device.createComputePipeline({
      layout: "auto",
      compute: {
        module: spectrogramModule,
        entryPoint: "main"
      }
    });
    const pipelines = { fftPipeline, spectrogramPipeline };
    this.pipelineCache.set(cacheString, pipelines);
    return pipelines;
  }
};
|
|
1088
|
+
|
|
1089
|
+
// src/engine/loudness.ts
|
|
1090
|
+
// Offset applied when converting mean-square power to LUFS (ITU-R BS.1770).
var LUFS_OFFSET = -0.691;
// Blocks quieter than this (LUFS) are dropped by the absolute gate.
var ABSOLUTE_GATE_THRESHOLD = -70;
// The relative gate sits this many LU below the absolute-gated mean.
var RELATIVE_GATE_OFFSET = -10;
// Resolution of the precomputed waveform/loudness envelopes.
var WAVEFORM_POINTS_PER_SECOND = 500;
// Momentary loudness window length in milliseconds.
var MOMENTARY_WINDOW_MS = 400;
// Short-term loudness window length in milliseconds.
var SHORT_TERM_WINDOW_MS = 3e3;
// Gating block length and step (ms) used for integrated loudness.
var BLOCK_DURATION_MS = 400;
var BLOCK_STEP_MS = 100;
/**
 * Converts a K-weighted mean-square power value to LUFS.
 * Non-positive power maps to -Infinity (silence).
 */
function meanSquareToLufs(meanSquare) {
  if (meanSquare <= 0) {
    return -Infinity;
  }
  return 10 * Math.log10(meanSquare) + LUFS_OFFSET;
}
|
|
1102
|
+
/**
 * Sliding-window loudness over a per-point K-weighted mean-square series.
 * Uses a running sum (add newest, drop oldest) so the whole series costs
 * O(n). Points that don't yet have a full window yield -Infinity.
 * @returns Float32Array of LUFS values, one per input point.
 */
function computeMomentaryLufs(kWeightedMeanSquare, windowPoints) {
  const total = kWeightedMeanSquare.length;
  const lufsSeries = new Float32Array(total);
  let windowSum = 0;
  for (let i = 0; i < total; i++) {
    windowSum += kWeightedMeanSquare[i];
    const oldest = i - windowPoints;
    if (oldest >= 0) {
      windowSum -= kWeightedMeanSquare[oldest];
    }
    lufsSeries[i] = i + 1 >= windowPoints
      ? meanSquareToLufs(windowSum / windowPoints)
      : -Infinity;
  }
  return lufsSeries;
}
|
|
1119
|
+
/**
 * Integrated (programme) loudness via the BS.1770 two-stage gating scheme:
 * first an absolute gate at ABSOLUTE_GATE_THRESHOLD, then a relative gate
 * RELATIVE_GATE_OFFSET LU below the mean loudness of the surviving blocks.
 * @param blockLoudness Per-block loudness values in LUFS.
 * @returns Integrated loudness in LUFS, or -Infinity if nothing survives.
 */
function computeIntegratedLufs(blockLoudness) {
  // Invert meanSquareToLufs: LUFS back to linear mean-square power.
  const lufsToPower = (lufs) => Math.pow(10, (lufs - LUFS_OFFSET) / 10);
  const gated = [];
  for (const lufs of blockLoudness) {
    if (lufs > ABSOLUTE_GATE_THRESHOLD) gated.push(lufs);
  }
  if (gated.length === 0) return -Infinity;
  let powerSum = 0;
  for (const lufs of gated) {
    powerSum += lufsToPower(lufs);
  }
  const relativeThreshold = meanSquareToLufs(powerSum / gated.length) + RELATIVE_GATE_OFFSET;
  let gatedPowerSum = 0;
  let gatedCount = 0;
  for (const lufs of gated) {
    if (lufs > relativeThreshold) {
      gatedPowerSum += lufsToPower(lufs);
      gatedCount++;
    }
  }
  if (gatedCount === 0) return -Infinity;
  return meanSquareToLufs(gatedPowerSum / gatedCount);
}
|
|
1146
|
+
/**
 * Derives the final loudness report from a completed sample scan:
 * momentary/short-term LUFS series, integrated LUFS (gated blocks), and
 * summary dB figures (peak, true peak, RMS, crest factor).
 * @param scanContext Context produced by createScanContext()/scanSamples().
 * @param overallPeak Overall absolute sample peak (linear).
 * @param overallRms Overall RMS (linear).
 * @param truePeak Optional oversampled true peak (linear); undefined when disabled.
 */
function computeLoudnessData(scanContext, overallPeak, overallRms, truePeak) {
  const { rmsEnvelope, peakEnvelope } = scanContext;
  // Only the points actually filled by the scan are valid.
  const pointCount = scanContext.state.pointIndex;
  // subarray: a view, not a copy — shares storage with the scan context.
  const kWeightedMeanSquare = scanContext.kWeightedMeanSquare.subarray(0, pointCount);
  const momentaryWindowPoints = Math.round(MOMENTARY_WINDOW_MS / 1e3 * WAVEFORM_POINTS_PER_SECOND);
  const shortTermWindowPoints = Math.round(SHORT_TERM_WINDOW_MS / 1e3 * WAVEFORM_POINTS_PER_SECOND);
  const momentaryLufs = computeMomentaryLufs(kWeightedMeanSquare, momentaryWindowPoints);
  const shortTermLufs = computeMomentaryLufs(kWeightedMeanSquare, shortTermWindowPoints);
  // Overlapping gating blocks (BLOCK_DURATION_MS long, stepped by BLOCK_STEP_MS)
  // for the integrated-loudness computation.
  const blockPoints = Math.round(BLOCK_DURATION_MS / 1e3 * WAVEFORM_POINTS_PER_SECOND);
  const stepPoints = Math.round(BLOCK_STEP_MS / 1e3 * WAVEFORM_POINTS_PER_SECOND);
  const blockLoudnessValues = [];
  for (let start = 0; start + blockPoints <= pointCount; start += stepPoints) {
    let sum = 0;
    for (let pt = start; pt < start + blockPoints; pt++) {
      sum += kWeightedMeanSquare[pt];
    }
    blockLoudnessValues.push(meanSquareToLufs(sum / blockPoints));
  }
  const blockLoudness = new Float32Array(blockLoudnessValues);
  const integratedLufs = computeIntegratedLufs(blockLoudness);
  // Linear -> dBFS conversions; silence maps to -Infinity.
  const peakDb = overallPeak > 0 ? 20 * Math.log10(overallPeak) : -Infinity;
  const truePeakDb = truePeak !== void 0 && truePeak > 0 ? 20 * Math.log10(truePeak) : void 0;
  const rmsDb = overallRms > 0 ? 20 * Math.log10(overallRms) : -Infinity;
  // Crest factor (peak-to-RMS ratio in dB); 0 when there is no signal.
  const crestFactor = overallRms > 0 ? 20 * Math.log10(overallPeak / overallRms) : 0;
  return {
    rmsEnvelope,
    peakEnvelope,
    momentaryLufs,
    shortTermLufs,
    integratedLufs,
    peakDb,
    truePeak,
    truePeakDb,
    rmsDb,
    crestFactor,
    pointCount
  };
}
|
|
1184
|
+
|
|
1185
|
+
// src/engine/k-weighting.ts
|
|
1186
|
+
/** Fresh delay-line state (two input taps, two output taps) for one biquad stage. */
function createBiquadState() {
  const zeroed = { x1: 0, x2: 0, y1: 0, y2: 0 };
  return zeroed;
}
|
|
1189
|
+
/**
 * Stage-1 K-weighting biquad (ITU-R BS.1770): a ~+4 dB high-shelf modeling
 * the acoustic effect of the head, derived parametrically via the bilinear
 * transform so it adapts to any sample rate.
 * @returns Normalized biquad coefficients { b0, b1, b2, a1, a2 }.
 */
function computeHighShelfCoefficients(sampleRate) {
  const centerHz = 1681.974450955533;
  const shelfGainDb = 3.999843853973347;
  const q = 0.7071752369554196;
  const vh = Math.pow(10, shelfGainDb / 20);
  const vb = Math.sqrt(vh);
  const k = Math.tan(Math.PI * centerHz / sampleRate);
  const kSquared = k * k;
  // a0 term used to normalize all coefficients.
  const a0 = 1 + k / q + kSquared;
  return {
    b0: (vh + vb * k / q + kSquared) / a0,
    b1: 2 * (kSquared - vh) / a0,
    b2: (vh - vb * k / q + kSquared) / a0,
    a1: 2 * (kSquared - 1) / a0,
    a2: (1 - k / q + kSquared) / a0
  };
}
|
|
1206
|
+
/**
 * Stage-2 K-weighting biquad (ITU-R BS.1770): a ~38 Hz high-pass (RLB
 * revision) that removes low-frequency rumble, derived via the bilinear
 * transform for the given sample rate.
 * @returns Normalized biquad coefficients { b0, b1, b2, a1, a2 }.
 */
function computeHighPassCoefficients(sampleRate) {
  const cutoffHz = 38.13547087602444;
  const q = 0.5003270373238773;
  const k = Math.tan(Math.PI * cutoffHz / sampleRate);
  const kSquared = k * k;
  // a0 term used to normalize all coefficients.
  const a0 = 1 + k / q + kSquared;
  return {
    b0: 1 / a0,
    b1: -2 / a0,
    b2: 1 / a0,
    a1: 2 * (kSquared - 1) / a0,
    a2: (1 - k / q + kSquared) / a0
  };
}
|
|
1220
|
+
/** Full two-stage K-weighting filter (high-shelf then high-pass) for a sample rate. */
function computeKWeightingCoefficients(sampleRate) {
  const stage1 = computeHighShelfCoefficients(sampleRate);
  const stage2 = computeHighPassCoefficients(sampleRate);
  return { stage1, stage2 };
}
|
|
1226
|
+
|
|
1227
|
+
// src/engine/true-peak.ts
|
|
1228
|
+
// Oversampling factor used for true-peak interpolation (4x, cf. BS.1770).
var OVERSAMPLING = 4;
// FIR taps evaluated per polyphase branch.
var TAPS_PER_PHASE = 12;
// Total length of the prototype low-pass interpolation filter.
var FILTER_LENGTH = TAPS_PER_PHASE * OVERSAMPLING;
|
|
1231
|
+
/**
 * Per-channel true-peak interpolator state: a doubled ring buffer holding
 * the last TAPS_PER_PHASE input samples (mirrored so reads never wrap),
 * plus the current write position.
 */
function createTruePeakState() {
  const historyRing = new Float32Array(TAPS_PER_PHASE * 2);
  return { buffer: historyRing, index: 0 };
}
|
|
1237
|
+
/**
 * Zeroth-order modified Bessel function of the first kind, I0(x),
 * computed from its power-series expansion truncated at 25 terms
 * (used below to build the Kaiser window).
 */
function besselI0(x) {
  let total = 1;
  let term = 1;
  for (let k = 1; k <= 25; k++) {
    const factor = x / (2 * k);
    term *= factor * factor;
    total += term;
  }
  return total;
}
|
|
1246
|
+
/**
 * Builds the polyphase FIR bank for 4x true-peak interpolation:
 * a Kaiser-windowed sinc prototype low-pass split into OVERSAMPLING
 * phases of TAPS_PER_PHASE taps each, with each phase normalized to
 * unity DC gain.
 * @returns Array of OVERSAMPLING Float32Array phases.
 */
function computePhases() {
  const center = (FILTER_LENGTH - 1) / 2;
  const prototype = new Float32Array(FILTER_LENGTH);
  // Kaiser window shape parameter.
  const beta = 9;
  const i0Beta = besselI0(beta);
  for (let ni = 0; ni < FILTER_LENGTH; ni++) {
    // Sinc argument in input-sample units (cutoff at the original Nyquist).
    const x = (ni - center) / OVERSAMPLING;
    const sinc = x === 0 ? 1 : Math.sin(Math.PI * x) / (Math.PI * x);
    // Kaiser window evaluated over [-1, 1].
    const arg = 2 * ni / (FILTER_LENGTH - 1) - 1;
    const win = besselI0(beta * Math.sqrt(Math.max(0, 1 - arg * arg))) / i0Beta;
    prototype[ni] = sinc * win;
  }
  const phases = [];
  for (let ph = 0; ph < OVERSAMPLING; ph++) {
    // Phase ph takes every OVERSAMPLING-th prototype tap, offset by ph.
    const phase = new Float32Array(TAPS_PER_PHASE);
    for (let ti = 0; ti < TAPS_PER_PHASE; ti++) {
      phase[ti] = prototype[OVERSAMPLING * ti + ph];
    }
    // Normalize each phase to unity DC gain so level is preserved.
    const dcGain = phase.reduce((sum, coeff) => sum + coeff, 0);
    if (dcGain > 0) {
      for (let ti = 0; ti < TAPS_PER_PHASE; ti++) {
        phase[ti] = phase[ti] / dcGain;
      }
    }
    phases.push(phase);
  }
  return phases;
}
|
|
1274
|
+
// Shared, immutable polyphase filter bank computed once at module load.
var truePeakPhases = computePhases();
/**
 * Feeds one sample into the per-channel interpolator and returns the
 * largest absolute value among the OVERSAMPLING interpolated outputs
 * for this position (i.e. the local oversampled peak estimate).
 * Mutates `state` (ring buffer contents and write index).
 */
function truePeakMaxAbs(sample, state) {
  const { buffer } = state;
  // Write the sample twice (mirrored halves) so the FIR reads below can
  // use a contiguous window without wraparound handling.
  buffer[state.index] = sample;
  buffer[state.index + TAPS_PER_PHASE] = sample;
  state.index = state.index + 1;
  if (state.index >= TAPS_PER_PHASE) state.index = 0;
  // After advancing, state.index points at the oldest sample.
  const readBase = state.index;
  let maxAbs = 0;
  for (let ph = 0; ph < OVERSAMPLING; ph++) {
    const phase = truePeakPhases[ph];
    let sum = 0;
    // Convolve newest-to-oldest against this phase's taps.
    for (let ti = 0; ti < TAPS_PER_PHASE; ti++) {
      sum += phase[ti] * buffer[readBase + TAPS_PER_PHASE - 1 - ti];
    }
    const abs = sum < 0 ? -sum : sum;
    if (abs > maxAbs) maxAbs = abs;
  }
  return maxAbs;
}
|
|
1294
|
+
|
|
1295
|
+
// src/engine/sample-scan.ts
|
|
1296
|
+
/**
 * Allocates all state for one streaming sample scan: per-channel
 * K-weighting biquad states and true-peak interpolators, running
 * aggregates, scratch buffers of `chunkSize` samples, and output
 * envelopes of `pointCount` points (`samplesPerPoint` samples each).
 * Missing channel weights default to 1.
 */
function createScanContext(metadata, pointCount, samplesPerPoint, chunkSize, computeLoudness = true, computeTruePeak = true) {
  const { channelCount, sampleRate, channelWeights: weights } = metadata;
  const biquadStates = Array.from({ length: channelCount }, () => ({
    stage1: createBiquadState(),
    stage2: createBiquadState()
  }));
  const truePeakStates = Array.from({ length: channelCount }, () => createTruePeakState());
  const channelWeights = new Float32Array(channelCount);
  for (let ch = 0; ch < channelCount; ch++) {
    channelWeights[ch] = weights ? weights[ch] ?? 1 : 1;
  }
  return {
    channelCount,
    channelWeights,
    samplesPerPoint,
    computeLoudness,
    computeTruePeak,
    kWeightingCoefficients: computeKWeightingCoefficients(sampleRate),
    // Running accumulators mutated in place by scanSamples().
    state: {
      pointIndex: 0,
      samplesInCurrentPoint: 0,
      pointMin: Infinity,
      pointMax: -Infinity,
      pointSumSq: 0,
      pointPeak: 0,
      kWeightedPointSum: 0,
      overallPeakAbs: 0,
      overallSumSquares: 0,
      totalSampleValues: 0,
      truePeakAbs: 0,
      biquadStates,
      truePeakStates
    },
    // Scratch buffers reused across chunks.
    monoBuffer: new Float32Array(chunkSize),
    kwBuffer: new Float32Array(chunkSize),
    // Interleaved [min, max] pairs, one pair per point.
    waveformBuffer: new Float32Array(pointCount * 2),
    rmsEnvelope: new Float32Array(pointCount),
    peakEnvelope: new Float32Array(pointCount),
    kWeightedMeanSquare: new Float32Array(pointCount)
  };
}
|
|
1345
|
+
/**
 * Collapses the accumulated scan state into overall summary figures.
 * @returns { overallPeak, overallRms, truePeak } (all linear; RMS is 0
 *          when no samples were scanned).
 */
function finalizeScan(context) {
  const { overallPeakAbs, overallSumSquares, totalSampleValues, truePeakAbs } = context.state;
  const overallRms = totalSampleValues > 0
    ? Math.sqrt(overallSumSquares / totalSampleValues)
    : 0;
  return { overallPeak: overallPeakAbs, overallRms, truePeak: truePeakAbs };
}
|
|
1353
|
+
/**
 * Single-pass scan of one chunk of multichannel samples. In one sweep it:
 * downmixes to mono (into context.monoBuffer), runs the two-stage
 * K-weighting filter and channel-weighted power sum (when computeLoudness),
 * tracks the oversampled true peak (when computeTruePeak), and fills the
 * waveform min/max, RMS, peak, and K-weighted envelopes point by point.
 * All running state lives in context.state and is updated in place, so
 * successive chunks continue seamlessly.
 * @param timing Accepted but not read in this body — TODO confirm whether
 *        callers still pass it (runPipeline calls with three arguments).
 */
function scanSamples(channelBuffers, samplesPerChannel, context, timing) {
  const { channelCount, channelWeights, samplesPerPoint, computeLoudness, computeTruePeak, kWeightingCoefficients, state, monoBuffer, kwBuffer, waveformBuffer, rmsEnvelope, peakEnvelope, kWeightedMeanSquare } = context;
  const invChannels = 1 / channelCount;
  const { stage1: s1Coeffs, stage2: s2Coeffs } = kWeightingCoefficients;
  const lastChannel = channelCount - 1;
  // waveformBuffer interleaves [min, max] pairs, so points = length / 2.
  const pointCount = Math.ceil(waveformBuffer.length / 2);
  monoBuffer.fill(0, 0, samplesPerChannel);
  kwBuffer.fill(0, 0, samplesPerChannel);
  // Hoist all running state into locals for the hot loops; written back at the end.
  let { pointIndex, samplesInCurrentPoint } = state;
  let { pointMin, pointMax, pointSumSq, pointPeak, kWeightedPointSum } = state;
  let { overallPeakAbs, overallSumSquares, totalSampleValues, truePeakAbs } = state;
  // Biquad coefficients as scalars to avoid property loads in the inner loops.
  const s1b0 = s1Coeffs.b0;
  const s1b1 = s1Coeffs.b1;
  const s1b2 = s1Coeffs.b2;
  const s1a1 = s1Coeffs.a1;
  const s1a2 = s1Coeffs.a2;
  const s2b0 = s2Coeffs.b0;
  const s2b1 = s2Coeffs.b1;
  const s2b2 = s2Coeffs.b2;
  const s2a1 = s2Coeffs.a1;
  const s2a2 = s2Coeffs.a2;
  for (let ch = 0; ch < channelCount; ch++) {
    const channelData = channelBuffers[ch];
    const biquad = state.biquadStates[ch];
    const chWeight = channelWeights[ch];
    const tpState = state.truePeakStates[ch];
    // Per-channel filter delay lines, hoisted into locals for the loop.
    let s1x1 = biquad.stage1.x1;
    let s1x2 = biquad.stage1.x2;
    let s1y1 = biquad.stage1.y1;
    let s1y2 = biquad.stage1.y2;
    let s2x1 = biquad.stage2.x1;
    let s2x2 = biquad.stage2.x2;
    let s2y1 = biquad.stage2.y1;
    let s2y2 = biquad.stage2.y2;
    if (ch < lastChannel) {
      // Non-final channels: accumulate mono sum and K-weighted power only.
      for (let si = 0; si < samplesPerChannel; si++) {
        const sample = channelData[si];
        monoBuffer[si] = monoBuffer[si] + sample;
        if (computeLoudness) {
          // Stage 1: high-shelf biquad (direct form I).
          const s1out = s1b0 * sample + s1b1 * s1x1 + s1b2 * s1x2 - s1a1 * s1y1 - s1a2 * s1y2;
          s1x2 = s1x1;
          s1x1 = sample;
          s1y2 = s1y1;
          s1y1 = s1out;
          // Stage 2: high-pass biquad, fed by stage 1.
          const kw = s2b0 * s1out + s2b1 * s2x1 + s2b2 * s2x2 - s2a1 * s2y1 - s2a2 * s2y2;
          s2x2 = s2x1;
          s2x1 = s1out;
          s2y2 = s2y1;
          s2y1 = kw;
          // Channel-weighted K-weighted power, summed across channels.
          kwBuffer[si] = kwBuffer[si] + chWeight * kw * kw;
        }
        if (computeTruePeak) {
          const tp = truePeakMaxAbs(sample, tpState);
          if (tp > truePeakAbs) truePeakAbs = tp;
        }
      }
    } else {
      // Final channel: finish the downmix and fold everything into the
      // point/overall aggregates in the same pass.
      for (let si = 0; si < samplesPerChannel; si++) {
        const sample = channelData[si];
        const mono = (monoBuffer[si] + sample) * invChannels;
        monoBuffer[si] = mono;
        if (computeLoudness) {
          const s1out = s1b0 * sample + s1b1 * s1x1 + s1b2 * s1x2 - s1a1 * s1y1 - s1a2 * s1y2;
          s1x2 = s1x1;
          s1x1 = sample;
          s1y2 = s1y1;
          s1y1 = s1out;
          const kw = s2b0 * s1out + s2b1 * s2x1 + s2b2 * s2x2 - s2a1 * s2y1 - s2a2 * s2y2;
          s2x2 = s2x1;
          s2x1 = s1out;
          s2y2 = s2y1;
          s2y1 = kw;
          // Add the earlier channels' accumulated power plus this channel's.
          kWeightedPointSum += kwBuffer[si] + chWeight * kw * kw;
        }
        if (computeTruePeak) {
          const tp = truePeakMaxAbs(sample, tpState);
          if (tp > truePeakAbs) truePeakAbs = tp;
        }
        const sq = mono * mono;
        const abs = mono < 0 ? -mono : mono;
        if (mono < pointMin) pointMin = mono;
        if (mono > pointMax) pointMax = mono;
        pointSumSq += sq;
        if (abs > pointPeak) pointPeak = abs;
        if (abs > overallPeakAbs) overallPeakAbs = abs;
        overallSumSquares += sq;
        samplesInCurrentPoint++;
        totalSampleValues++;
        // A point is complete: flush its aggregates into the envelopes.
        if (samplesInCurrentPoint >= samplesPerPoint && pointIndex < pointCount) {
          const wo = pointIndex * 2;
          const invSamples = 1 / samplesInCurrentPoint;
          waveformBuffer[wo] = pointMin;
          waveformBuffer[wo + 1] = pointMax;
          rmsEnvelope[pointIndex] = Math.sqrt(pointSumSq * invSamples);
          peakEnvelope[pointIndex] = pointPeak;
          kWeightedMeanSquare[pointIndex] = kWeightedPointSum * invSamples;
          pointMin = Infinity;
          pointMax = -Infinity;
          pointSumSq = 0;
          pointPeak = 0;
          kWeightedPointSum = 0;
          samplesInCurrentPoint = 0;
          pointIndex++;
        }
      }
    }
    // Persist the channel's filter delay lines for the next chunk.
    biquad.stage1.x1 = s1x1;
    biquad.stage1.x2 = s1x2;
    biquad.stage1.y1 = s1y1;
    biquad.stage1.y2 = s1y2;
    biquad.stage2.x1 = s2x1;
    biquad.stage2.x2 = s2x2;
    biquad.stage2.y1 = s2y1;
    biquad.stage2.y2 = s2y2;
  }
  // Persist the running aggregates for the next chunk.
  state.overallPeakAbs = overallPeakAbs;
  state.overallSumSquares = overallSumSquares;
  state.totalSampleValues = totalSampleValues;
  state.truePeakAbs = truePeakAbs;
  state.pointIndex = pointIndex;
  state.samplesInCurrentPoint = samplesInCurrentPoint;
  state.pointMin = pointMin;
  state.pointMax = pointMax;
  state.pointSumSq = pointSumSq;
  state.pointPeak = pointPeak;
  state.kWeightedPointSum = kWeightedPointSum;
}
|
|
1480
|
+
|
|
1481
|
+
// src/engine/runPipeline.ts
var DEFAULT_CHUNK_SIZE = 131072;

/**
 * Hands control back to the event loop between processing chunks so long
 * pipeline runs do not block rendering. Uses the Scheduler API
 * (`scheduler.yield`) when the host exposes it, otherwise falls back to a
 * zero-delay timeout.
 *
 * @returns {Promise<void>} resolves once other queued work has had a chance to run.
 */
function yieldControl() {
  const hasSchedulerYield = typeof scheduler !== "undefined" && typeof scheduler.yield === "function";
  if (hasSchedulerYield) {
    return scheduler.yield();
  }
  return new Promise((resolve) => {
    setTimeout(resolve, 0);
  });
}
|
|
1489
|
+
/**
 * Drives the full offline analysis pipeline over the requested sample range:
 * streams samples chunk-by-chunk through the waveform/loudness scanner and,
 * when a spectrogram is requested, through the GPU spectral engine, then
 * assembles the combined result.
 *
 * @param options - metadata, sample range, per-channel sample reader, and config.
 * @param engine - spectral engine used when `config` requests a spectrogram.
 * @returns waveform min/max buffer + point count, optional loudness data,
 *          optional spectrogram texture, and the options with the resolved config.
 * @throws DOMException("AbortError") when `config.signal` is aborted mid-run.
 */
async function runPipeline(options, engine) {
  const { metadata, sampleQuery, readSamples, config } = options;
  const { sampleRate, channelCount } = metadata;
  const { startSample, endSample } = sampleQuery;
  const { signal } = config;
  const sampleCount = endSample - startSample;
  const samplesPerPoint = Math.round(sampleRate / WAVEFORM_POINTS_PER_SECOND);
  const pointCount = Math.ceil(sampleCount / samplesPerPoint);
  const resolvedConfig = resolveConfig(config);
  const { spectrogram, loudness, truePeak: computeTruePeak } = resolvedConfig;
  const scanContext = createScanContext(metadata, pointCount, samplesPerPoint, DEFAULT_CHUNK_SIZE, loudness, computeTruePeak);
  // The GPU context is only prepared when a spectrogram was requested.
  const spectralContext = spectrogram ? await engine.prepare(sampleCount, sampleRate, resolvedConfig) : null;
  let offset = 0;
  try {
    while (offset < sampleCount) {
      if (signal.aborted) {
        // Fix: this branch previously called engine.cleanupContext() before
        // throwing, and the catch block below then cleaned up the same
        // context a second time. The catch handler is now the single
        // cleanup point for every error path, including abort.
        throw new DOMException("Aborted", "AbortError");
      }
      const chunkFrames = Math.min(DEFAULT_CHUNK_SIZE, sampleCount - offset);
      // Read every channel for this chunk in parallel.
      const channelBuffers = await Promise.all(Array.from({ length: channelCount }, (_, ch) => readSamples(ch, startSample + offset, chunkFrames)));
      scanSamples(channelBuffers, chunkFrames, scanContext);
      if (spectralContext) {
        // scanSamples fills scanContext.monoBuffer, which feeds the GPU engine.
        engine.submitChunk(scanContext.monoBuffer, chunkFrames, spectralContext);
      }
      offset += chunkFrames;
      // Keep the main thread responsive between chunks.
      await yieldControl();
    }
  } catch (error) {
    if (spectralContext) engine.cleanupContext(spectralContext);
    throw error;
  }
  const { overallPeak, overallRms, truePeak } = finalizeScan(scanContext);
  // True peak is only reported when it was actually computed.
  const loudnessData = loudness ? computeLoudnessData(scanContext, overallPeak, overallRms, computeTruePeak ? truePeak : void 0) : null;
  const spectrogramTexture = spectralContext ? engine.finalize(sampleQuery, spectralContext, resolvedConfig).spectrogramTexture : null;
  const resolvedOptions = {
    ...options,
    config: resolvedConfig
  };
  return {
    waveformBuffer: scanContext.waveformBuffer,
    waveformPointCount: scanContext.state.pointIndex,
    loudnessData,
    spectrogramTexture,
    options: resolvedOptions
  };
}
|
|
1536
|
+
|
|
1537
|
+
// src/useSpectralCompute.tsx
var EMPTY_RESULT = { status: "idle" };

/**
 * React hook that runs the waveform/loudness/spectrogram pipeline for the
 * given audio metadata and view query, re-running whenever the query, device,
 * or (serialized) config changes.
 *
 * @param options - { metadata, query (startMs/endMs/width/height), readSamples, config }.
 * @returns { status: "idle" } initially, then { status: "ready", ...pipelineResult }
 *          or { status: "error", error }.
 */
function useSpectralCompute(options) {
  const { metadata, query, readSamples, config } = options;
  const { sampleRate, sampleCount, channelCount } = metadata;
  const { startMs, endMs, width, height } = query;
  const providedDevice = config?.device;
  let signal = config?.signal;
  const deviceReference = useRef(null);
  const engineReference = useRef(null);
  const engineDeviceRef = useRef(null);
  const previousTextureRef = useRef(null);
  const abortControllerReference = useRef(null);
  // The latest reader is tracked in a ref so the effect below does not need
  // `readSamples` in its dependency list (callers often pass a fresh closure
  // on every render, which would otherwise retrigger the whole pipeline).
  const readSamplesRef = useRef(readSamples);
  readSamplesRef.current = readSamples;
  const [result, setResult] = useState(EMPTY_RESULT);
  // Structural identity for the config object, so equivalent configs do not
  // retrigger the effect.
  const configKey = JSON.stringify(config ?? null);
  useEffect(() => {
    // Cancel any in-flight pipeline run before starting a new one.
    abortControllerReference.current?.abort();
    if (metadata.sampleCount === 0) return;
    const controller = new AbortController();
    abortControllerReference.current = controller;
    signal ?? (signal = controller.signal);
    if (signal.aborted) {
      controller.abort();
    } else {
      // Propagate an external abort into this run's controller.
      signal.addEventListener("abort", () => controller.abort(), { once: true });
    }
    const sampleQuery = {
      startSample: Math.floor(startMs / 1e3 * sampleRate),
      endSample: Math.min(Math.ceil(endMs / 1e3 * sampleRate), sampleCount),
      width,
      height
    };
    void (async () => {
      try {
        deviceReference.current ?? (deviceReference.current = await getDevice(providedDevice));
        const device = deviceReference.current;
        const pipelineOptions = {
          metadata,
          sampleQuery,
          // Fix: read through the ref so the pipeline always calls the most
          // recent reader. Previously the effect captured `readSamples`
          // directly, which could go stale because it is deliberately
          // omitted from the dependency list below.
          readSamples: (channel, start, count) => readSamplesRef.current(channel, start, count),
          config: {
            ...config,
            device,
            signal
          }
        };
        // Rebuild the engine if the GPU device changed since the last run.
        if (engineReference.current && engineDeviceRef.current !== device) {
          engineReference.current.destroy();
          engineReference.current = null;
        }
        engineReference.current ?? (engineReference.current = new SpectralEngine(device));
        engineDeviceRef.current = device;
        const pipelineResult = await runPipeline(pipelineOptions, engineReference.current);
        // Release the texture from the previous run before keeping the new one.
        previousTextureRef.current?.destroy();
        previousTextureRef.current = pipelineResult.spectrogramTexture;
        setResult({
          status: "ready",
          ...pipelineResult
        });
      } catch (error) {
        // Aborted runs are expected (superseded query / unmount); stay silent.
        if (error instanceof DOMException && error.name === "AbortError") {
          return;
        }
        setResult({
          status: "error",
          error: error instanceof Error ? error : new Error(String(error))
        });
      }
    })();
    return () => {
      controller.abort();
    };
  }, [sampleRate, channelCount, sampleCount, startMs, endMs, width, height, providedDevice, configKey]);
  // Unmount-only cleanup: tear down the engine and the last retained texture.
  useEffect(
    () => () => {
      engineReference.current?.destroy();
      engineReference.current = null;
      previousTextureRef.current?.destroy();
      previousTextureRef.current = null;
    },
    []
  );
  return result;
}
|
|
1623
|
+
// Default waveform tint; presumably RGB in 0-255 — the shader consumes the
// raw floats, so confirm the expected range against WAVEFORM_VISUALIZE_SHADER.
var DEFAULT_WAVEFORM_COLOR = [0, 255, 0];
// Canvas component that renders the min/max waveform buffer from a
// useSpectralCompute result via a WebGPU compute pass, then blits the
// resulting texture to the canvas. GPU resources are cached in refs and
// rebuilt only when the device or the compute result changes.
var WaveformCanvas = ({ computeResult, ref, color = DEFAULT_WAVEFORM_COLOR }) => {
  const [internalCanvasReference, canvasCallback] = useCanvasRef(ref);
  // Cached GPU state; each ref survives re-renders and is torn down on
  // device change or unmount.
  const blitReference = useRef(null);
  const blitDeviceRef = useRef(null);
  const pipelineReference = useRef(null);
  const waveformGpuBufferRef = useRef(null);
  const outputTextureRef = useRef(null);
  const uniformBufferRef = useRef(null);
  const bindGroupRef = useRef(null);
  const lastComputeResultRef = useRef(null);
  // NOTE(review): written below but never read in this component — looks
  // vestigial; confirm before removing.
  const lastDimensionsRef = useRef(null);
  useEffect(() => {
    const canvas = internalCanvasReference.current;
    // Nothing to draw until a ready result with a non-empty waveform exists.
    if (!canvas || computeResult.status !== "ready" || !computeResult.waveformBuffer || computeResult.waveformPointCount === 0) {
      return;
    }
    const { device } = computeResult.options.config;
    const { width: width2, height: height2 } = computeResult.options.sampleQuery;
    const { waveformBuffer, waveformPointCount } = computeResult;
    // Device changed: every cached resource belongs to the old device and
    // must be destroyed before anything is recreated.
    if (blitReference.current && blitDeviceRef.current !== device) {
      blitReference.current.destroy();
      blitReference.current = null;
      pipelineReference.current = null;
      waveformGpuBufferRef.current?.destroy();
      waveformGpuBufferRef.current = null;
      outputTextureRef.current?.destroy();
      outputTextureRef.current = null;
      uniformBufferRef.current?.destroy();
      uniformBufferRef.current = null;
      bindGroupRef.current = null;
      lastComputeResultRef.current = null;
      lastDimensionsRef.current = null;
    }
    blitReference.current ?? (blitReference.current = new BlitRenderer(device, canvas));
    blitDeviceRef.current = device;
    // Compile the visualization compute pipeline once per device.
    if (!pipelineReference.current) {
      const shaderModule = device.createShaderModule({ code: WAVEFORM_VISUALIZE_SHADER });
      pipelineReference.current = device.createComputePipeline({
        layout: "auto",
        compute: {
          module: shaderModule,
          entryPoint: "main"
        }
      });
    }
    const pipeline = pipelineReference.current;
    // Buffers/texture/bind group are rebuilt only for a new compute result
    // (new waveform data or new output dimensions).
    const computeResultChanged = lastComputeResultRef.current !== computeResult;
    if (computeResultChanged) {
      waveformGpuBufferRef.current?.destroy();
      outputTextureRef.current?.destroy();
      uniformBufferRef.current?.destroy();
      // Upload the waveform min/max pairs via a mapped-at-creation buffer.
      const gpuWaveformBuffer = device.createBuffer({
        size: waveformBuffer.byteLength,
        usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
        mappedAtCreation: true
      });
      new Float32Array(gpuWaveformBuffer.getMappedRange()).set(waveformBuffer);
      gpuWaveformBuffer.unmap();
      // Output target for the compute shader; also sampled by the blitter.
      const outputTexture = device.createTexture({
        size: { width: width2, height: height2 },
        format: "rgba8unorm",
        usage: GPUTextureUsage.STORAGE_BINDING | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC
      });
      // 24 bytes = 3 x u32 (pointCount, width, height) + 3 x f32 (RGB),
      // matching the writes below.
      const uniformBuffer = device.createBuffer({
        size: 24,
        usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
      });
      const bindGroup = device.createBindGroup({
        layout: pipeline.getBindGroupLayout(0),
        entries: [
          { binding: 0, resource: { buffer: gpuWaveformBuffer } },
          { binding: 1, resource: outputTexture.createView() },
          { binding: 2, resource: { buffer: uniformBuffer } }
        ]
      });
      waveformGpuBufferRef.current = gpuWaveformBuffer;
      outputTextureRef.current = outputTexture;
      uniformBufferRef.current = uniformBuffer;
      bindGroupRef.current = bindGroup;
      lastComputeResultRef.current = computeResult;
      lastDimensionsRef.current = { width: width2, height: height2 };
    }
    // Uniforms are (re)written every run so a color-only change takes effect
    // without rebuilding the buffers above. Little-endian, per WGSL layout.
    const uniformData = new ArrayBuffer(24);
    const uniforms = new DataView(uniformData);
    uniforms.setUint32(0, waveformPointCount, true);
    uniforms.setUint32(4, width2, true);
    uniforms.setUint32(8, height2, true);
    uniforms.setFloat32(12, color[0], true);
    uniforms.setFloat32(16, color[1], true);
    uniforms.setFloat32(20, color[2], true);
    device.queue.writeBuffer(uniformBufferRef.current, 0, uniformData);
    const commandEncoder = device.createCommandEncoder();
    const computePass = commandEncoder.beginComputePass();
    computePass.setPipeline(pipeline);
    computePass.setBindGroup(0, bindGroupRef.current);
    // One workgroup per 64 output columns — assumes the shader's
    // @workgroup_size is 64; confirm against WAVEFORM_VISUALIZE_SHADER.
    computePass.dispatchWorkgroups(Math.ceil(width2 / 64));
    computePass.end();
    device.queue.submit([commandEncoder.finish()]);
    // Present the compute output on the canvas.
    blitReference.current.resize(width2, height2);
    blitReference.current.render(outputTextureRef.current);
  }, [computeResult, color[0], color[1], color[2]]);
  // Unmount cleanup: destroy every cached GPU resource.
  useEffect(
    () => () => {
      blitReference.current?.destroy();
      blitReference.current = null;
      pipelineReference.current = null;
      waveformGpuBufferRef.current?.destroy();
      waveformGpuBufferRef.current = null;
      outputTextureRef.current?.destroy();
      outputTextureRef.current = null;
      uniformBufferRef.current?.destroy();
      uniformBufferRef.current = null;
      bindGroupRef.current = null;
    },
    []
  );
  // Size the canvas element from the query; 0x0 until the result is ready.
  const { width, height } = computeResult.status === "ready" ? computeResult.options.sampleQuery : { width: 0, height: 0 };
  return /* @__PURE__ */ jsx(
    "canvas",
    {
      ref: canvasCallback,
      width,
      height
    }
  );
};
|
|
1750
|
+
|
|
1751
|
+
export { LoudnessCanvas, SpectrogramCanvas, WaveformCanvas, lavaColormap, useSpectralCompute, viridisColormap };
|