react-audio-wavekit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +116 -0
- package/README.md +231 -0
- package/dist/constants.cjs +20 -0
- package/dist/constants.js +20 -0
- package/dist/index.cjs +12 -0
- package/dist/index.d.ts +235 -0
- package/dist/index.js +12 -0
- package/dist/recorder/live-recorder/index.cjs +125 -0
- package/dist/recorder/live-recorder/index.js +125 -0
- package/dist/recorder/live-streaming/recorder/recorder-compound.cjs +244 -0
- package/dist/recorder/live-streaming/recorder/recorder-compound.js +244 -0
- package/dist/recorder/live-streaming/recorder/recorder-context.cjs +20 -0
- package/dist/recorder/live-streaming/recorder/recorder-context.js +20 -0
- package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.cjs +126 -0
- package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.js +126 -0
- package/dist/recorder/live-streaming/use-recording-amplitudes.cjs +92 -0
- package/dist/recorder/live-streaming/use-recording-amplitudes.js +92 -0
- package/dist/recorder/use-audio-analyser.cjs +59 -0
- package/dist/recorder/use-audio-analyser.js +59 -0
- package/dist/recorder/use-audio-recorder.cjs +139 -0
- package/dist/recorder/use-audio-recorder.js +139 -0
- package/dist/recorder/util-mime-type.cjs +15 -0
- package/dist/recorder/util-mime-type.js +15 -0
- package/dist/waveform/index.cjs +73 -0
- package/dist/waveform/index.js +73 -0
- package/dist/waveform/util-audio-decoder.cjs +45 -0
- package/dist/waveform/util-audio-decoder.js +45 -0
- package/dist/waveform/util-suspense.cjs +24 -0
- package/dist/waveform/util-suspense.js +24 -0
- package/dist/waveform/waveform-renderer.cjs +105 -0
- package/dist/waveform/waveform-renderer.js +105 -0
- package/package.json +74 -0
package/dist/recorder/live-streaming/recorder/recorder-compound.js
@@ -0,0 +1,244 @@
import { jsx } from "react/jsx-runtime";
import { useOverlayScrollbars } from "overlayscrollbars-react";
import { forwardRef, useRef, useId, useEffect, useCallback } from "react";
import { DEFAULT_SCROLLBAR_APPEARANCE, DEFAULT_WAVEFORM_APPEARANCE } from "../../../constants.js";
import { LiveStreamingRecorderProvider, useLiveStreamingRecorderContext } from "./recorder-context.js";
const LiveStreamingRecorderRoot = forwardRef(
  function LiveStreamingRecorderRoot2({
    children,
    className = "",
    style,
    mediaRecorder,
    fftSize,
    smoothingTimeConstant,
    sampleInterval,
    appearance,
    ...props
  }, ref) {
    const containerRef = useRef(null);
    const uniqueId = useId().replace(/:/g, "");
    const themeClassName = `os-theme-lsr-${uniqueId}`;
    const scrollbar = appearance?.scrollbar;
    const thumbColor = scrollbar?.thumbColor ?? DEFAULT_SCROLLBAR_APPEARANCE.thumbColor;
    const hidden = scrollbar?.hidden ?? DEFAULT_SCROLLBAR_APPEARANCE.hidden;
    const [initializeOS, osInstance] = useOverlayScrollbars({
      options: {
        overflow: { x: "scroll", y: "hidden" },
        scrollbars: {
          theme: themeClassName,
          visibility: hidden ? "hidden" : "auto",
          autoHide: "leave",
          // Hide when the mouse leaves the area (the most common UX)
          autoHideDelay: 400,
          dragScroll: true,
          clickScroll: true
        }
      },
      defer: true
    });
    useEffect(() => {
      if (containerRef.current) {
        initializeOS(containerRef.current);
      }
    }, [initializeOS]);
    useEffect(() => {
      const instance = osInstance();
      if (instance) {
        instance.options({
          scrollbars: {
            visibility: hidden ? "hidden" : "auto"
          }
        });
      }
    }, [osInstance, hidden]);
    useEffect(() => {
      const styleId = `lsr-os-theme-${uniqueId}`;
      document.getElementById(styleId)?.remove();
      const styleElement = document.createElement("style");
      styleElement.id = styleId;
      styleElement.textContent = `
        .${themeClassName} {
          --os-size: 8px;
          --os-padding-perpendicular: 2px;
          --os-padding-axis: 2px;
          --os-track-border-radius: 4px;
          --os-track-bg: transparent;
          --os-track-bg-hover: transparent;
          --os-handle-border-radius: 4px;
          --os-handle-bg: ${thumbColor};
          --os-handle-bg-hover: ${thumbColor};
          --os-handle-bg-active: ${thumbColor};
          --os-handle-min-size: 30px;
        }
      `;
      document.head.appendChild(styleElement);
      return () => {
        document.getElementById(styleId)?.remove();
      };
    }, [uniqueId, themeClassName, thumbColor]);
    useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(containerRef.current);
        } else {
          ref.current = containerRef.current;
        }
      }
    }, [ref]);
    return /* @__PURE__ */ jsx("div", { ref: containerRef, className, style, ...props, children: /* @__PURE__ */ jsx(
      LiveStreamingRecorderProvider,
      {
        mediaRecorder,
        fftSize,
        smoothingTimeConstant,
        sampleInterval,
        children
      }
    ) });
  }
);
const LiveStreamingRecorderCanvas = forwardRef(
  function LiveStreamingRecorderCanvas2({ className = "", style, appearance, growWidth = true, ...props }, ref) {
    const { amplitudes, isRecording, isPaused } = useLiveStreamingRecorderContext();
    const canvasRef = useRef(null);
    const animationRef = useRef(null);
    const containerSizeRef = useRef({ width: 0, height: 0 });
    const containerRef = useRef(null);
    const prevCanvasWidthRef = useRef(0);
    useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(canvasRef.current);
        } else {
          ref.current = canvasRef.current;
        }
      }
    }, [ref]);
    useEffect(() => {
      if (amplitudes.length === 0) {
        prevCanvasWidthRef.current = 0;
      }
    }, [amplitudes.length]);
    const drawWaveform = useCallback(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const container = canvas.parentElement;
      const containerWidth = container?.clientWidth || canvas.clientWidth;
      const containerHeight = container?.clientHeight || canvas.clientHeight;
      const barColor = appearance?.barColor ?? DEFAULT_WAVEFORM_APPEARANCE.barColor;
      const barWidth = appearance?.barWidth ?? DEFAULT_WAVEFORM_APPEARANCE.barWidth;
      const barGap = appearance?.barGap ?? DEFAULT_WAVEFORM_APPEARANCE.barGap;
      const barRadius = appearance?.barRadius ?? DEFAULT_WAVEFORM_APPEARANCE.barRadius;
      const barHeightScale = appearance?.barHeightScale ?? DEFAULT_WAVEFORM_APPEARANCE.barHeightScale;
      const totalBarWidth = barWidth + barGap;
      if (isRecording || amplitudes.length > 0) {
        let canvasWidth;
        if (growWidth) {
          const requiredWidth = amplitudes.length * totalBarWidth;
          const calculatedWidth = amplitudes.length > 0 ? requiredWidth : containerWidth;
          canvasWidth = Math.max(calculatedWidth, prevCanvasWidthRef.current);
          prevCanvasWidthRef.current = canvasWidth;
          canvas.style.width = `${canvasWidth}px`;
        } else {
          canvasWidth = containerWidth;
          canvas.style.width = "100%";
        }
        canvas.width = canvasWidth * dpr;
        canvas.height = containerHeight * dpr;
        ctx.scale(dpr, dpr);
        ctx.clearRect(0, 0, canvasWidth, containerHeight);
        ctx.fillStyle = barColor;
        const minBarHeight = 2;
        ctx.beginPath();
        if (growWidth) {
          for (let i = 0; i < amplitudes.length; i++) {
            const amplitude = amplitudes[i];
            const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
            const x = i * totalBarWidth;
            const y = (containerHeight - barHeight) / 2;
            ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          }
        } else {
          const barsCount = Math.floor(canvasWidth / totalBarWidth);
          const step = amplitudes.length / barsCount;
          for (let i = 0; i < barsCount; i++) {
            const amplitudeIndex = Math.min(Math.floor(i * step), amplitudes.length - 1);
            const amplitude = amplitudes[amplitudeIndex] || 0;
            const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
            const x = i * totalBarWidth;
            const y = (containerHeight - barHeight) / 2;
            ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          }
        }
        ctx.fill();
      }
    }, [amplitudes, isRecording, appearance, growWidth]);
    useEffect(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const osContent = canvas.parentElement;
      const osViewport = osContent?.parentElement;
      if (osViewport?.classList.contains("os-viewport")) {
        containerRef.current = osViewport;
      } else {
        containerRef.current = canvas.parentElement;
      }
      const resizeObserver = new ResizeObserver((entries) => {
        const entry = entries[0];
        if (!entry) return;
        const { width, height } = entry.contentRect;
        containerSizeRef.current = { width, height };
        if (!isRecording) {
          drawWaveform();
        }
      });
      resizeObserver.observe(canvas);
      return () => resizeObserver.disconnect();
    }, [isRecording, drawWaveform]);
    useEffect(() => {
      if (isRecording && !isPaused) {
        const draw = () => {
          drawWaveform();
          if (growWidth && containerRef.current) {
            containerRef.current.scrollLeft = containerRef.current.scrollWidth;
          }
          animationRef.current = requestAnimationFrame(draw);
        };
        draw();
        return () => {
          if (animationRef.current) {
            cancelAnimationFrame(animationRef.current);
            animationRef.current = null;
          }
        };
      }
      drawWaveform();
    }, [isRecording, isPaused, drawWaveform, growWidth]);
    return /* @__PURE__ */ jsx(
      "canvas",
      {
        ref: canvasRef,
        className,
        style: {
          // Set to block in growWidth mode to allow self-determined width
          display: growWidth ? "block" : void 0,
          height: "100%",
          ...style
        },
        "aria-hidden": "true",
        tabIndex: -1,
        ...props
      }
    );
  }
);
const LiveStreamingRecorder = Object.assign(LiveStreamingRecorderRoot, {
  Root: LiveStreamingRecorderRoot,
  Canvas: LiveStreamingRecorderCanvas
});
export {
  LiveStreamingRecorder
};
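Taken together, recorder-compound exposes a compound component: Root owns an OverlayScrollbars viewport and provides the MediaRecorder-driven amplitude store, while Canvas draws the bars and auto-scrolls to the newest sample while recording. A minimal usage sketch follows (hypothetical app code, not part of the diff; it assumes the package root re-exports LiveStreamingRecorder):

import { useState } from "react";
import { LiveStreamingRecorder } from "react-audio-wavekit";

function RecorderDemo() {
  const [recorder, setRecorder] = useState(null);

  async function start() {
    // Capture the microphone and hand the live MediaRecorder to the component.
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.start();
    setRecorder(mediaRecorder);
  }

  return (
    <div>
      <button onClick={start}>Record</button>
      <LiveStreamingRecorder.Root
        mediaRecorder={recorder}
        sampleInterval={50}
        style={{ width: 320, height: 64 }}
        appearance={{ scrollbar: { thumbColor: "#999", hidden: false } }}
      >
        {/* growWidth lets the canvas grow with the recording; Root scrolls it. */}
        <LiveStreamingRecorder.Canvas
          growWidth
          appearance={{ barColor: "#4f46e5", barWidth: 3, barGap: 2 }}
        />
      </LiveStreamingRecorder.Root>
    </div>
  );
}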
package/dist/recorder/live-streaming/recorder/recorder-context.cjs
@@ -0,0 +1,20 @@
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const jsxRuntime = require("react/jsx-runtime");
const react = require("react");
const useRecordingAmplitudes = require("../use-recording-amplitudes.cjs");
const LiveStreamingRecorderContext = react.createContext(null);
function LiveStreamingRecorderProvider({ children, ...options }) {
  const value = useRecordingAmplitudes.useRecordingAmplitudes(options);
  const content = typeof children === "function" ? children(value) : children;
  return /* @__PURE__ */ jsxRuntime.jsx(LiveStreamingRecorderContext.Provider, { value, children: content });
}
function useLiveStreamingRecorderContext() {
  const context = react.useContext(LiveStreamingRecorderContext);
  if (!context) {
    throw new Error("useLiveStreamingRecorderContext must be used within LiveStreamingRecorder.Root");
  }
  return context;
}
exports.LiveStreamingRecorderProvider = LiveStreamingRecorderProvider;
exports.useLiveStreamingRecorderContext = useLiveStreamingRecorderContext;
package/dist/recorder/live-streaming/recorder/recorder-context.js
@@ -0,0 +1,20 @@
import { jsx } from "react/jsx-runtime";
import { createContext, useContext } from "react";
import { useRecordingAmplitudes } from "../use-recording-amplitudes.js";
const LiveStreamingRecorderContext = createContext(null);
function LiveStreamingRecorderProvider({ children, ...options }) {
  const value = useRecordingAmplitudes(options);
  const content = typeof children === "function" ? children(value) : children;
  return /* @__PURE__ */ jsx(LiveStreamingRecorderContext.Provider, { value, children: content });
}
function useLiveStreamingRecorderContext() {
  const context = useContext(LiveStreamingRecorderContext);
  if (!context) {
    throw new Error("useLiveStreamingRecorderContext must be used within LiveStreamingRecorder.Root");
  }
  return context;
}
export {
  LiveStreamingRecorderProvider,
  useLiveStreamingRecorderContext
};
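One detail worth noting in recorder-context: the provider accepts function children (the `typeof children === "function" ? children(value) : children` branch), and Root forwards its children to the provider unchanged, so recorder state can also be consumed as a render prop without calling the context hook yourself. A sketch under that assumption (hypothetical app code):

<LiveStreamingRecorder.Root mediaRecorder={recorder}>
  {({ amplitudes, isRecording, isPaused, clearAmplitudes }) => (
    <>
      <span>{isRecording ? (isPaused ? "Paused" : "Recording") : "Idle"}</span>
      <span>{amplitudes.length} samples</span>
      <button onClick={clearAmplitudes}>Clear</button>
      <LiveStreamingRecorder.Canvas />
    </>
  )}
</LiveStreamingRecorder.Root>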
package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.cjs
@@ -0,0 +1,126 @@
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const jsxRuntime = require("react/jsx-runtime");
const react = require("react");
const constants = require("../../../constants.cjs");
const useRecordingAmplitudes = require("../use-recording-amplitudes.cjs");
const LiveStreamingStackRecorder = react.forwardRef(
  function LiveStreamingStackRecorder2({ mediaRecorder, fftSize, smoothingTimeConstant, sampleInterval, appearance, className = "", style, ...props }, ref) {
    const { amplitudes, isRecording, isPaused } = useRecordingAmplitudes.useRecordingAmplitudes({
      mediaRecorder,
      fftSize,
      smoothingTimeConstant,
      sampleInterval
    });
    const canvasRef = react.useRef(null);
    const animationRef = react.useRef(null);
    const sizeRef = react.useRef({ width: 0, height: 0 });
    react.useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(canvasRef.current);
        } else {
          ref.current = canvasRef.current;
        }
      }
    }, [ref]);
    const drawWaveform = react.useCallback(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const containerWidth = sizeRef.current.width;
      const containerHeight = sizeRef.current.height;
      if (containerWidth === 0 || containerHeight === 0) return;
      const barColor = appearance?.barColor ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barColor;
      const barWidth = appearance?.barWidth ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barWidth;
      const barGap = appearance?.barGap ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barGap;
      const barRadius = appearance?.barRadius ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barRadius;
      const barHeightScale = appearance?.barHeightScale ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barHeightScale;
      const totalBarWidth = barWidth + barGap;
      if (isRecording || amplitudes.length > 0) {
        const canvasWidth = containerWidth;
        ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
        ctx.clearRect(0, 0, canvasWidth, containerHeight);
        ctx.fillStyle = barColor;
        const minBarHeight = 2;
        const maxBarsCount = Math.floor(canvasWidth / totalBarWidth);
        const barsCount = Math.min(amplitudes.length, maxBarsCount);
        const needsDownsample = amplitudes.length > maxBarsCount;
        ctx.beginPath();
        for (let i = 0; i < barsCount; i++) {
          let amplitude;
          if (needsDownsample) {
            const startIdx = Math.floor(i * amplitudes.length / barsCount);
            const endIdx = Math.floor((i + 1) * amplitudes.length / barsCount);
            let maxAmplitude = 0;
            for (let j = startIdx; j < endIdx; j++) {
              maxAmplitude = Math.max(maxAmplitude, amplitudes[j] || 0);
            }
            amplitude = maxAmplitude;
          } else {
            amplitude = amplitudes[i] || 0;
          }
          const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
          const x = Math.round(i * totalBarWidth);
          const y = Math.round((containerHeight - barHeight) / 2);
          const roundedBarHeight = Math.round(barHeight);
          ctx.roundRect(x, y, barWidth, roundedBarHeight, barRadius);
        }
        ctx.fill();
      }
    }, [amplitudes, isRecording, appearance]);
    react.useEffect(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const resizeObserver = new ResizeObserver((entries) => {
        const entry = entries[0];
        if (!entry) return;
        const { width, height } = entry.contentRect;
        if (sizeRef.current.width === width && sizeRef.current.height === height) return;
        sizeRef.current = { width, height };
        const dpr = window.devicePixelRatio || 1;
        canvas.width = width * dpr;
        canvas.height = height * dpr;
        if (!isRecording) {
          drawWaveform();
        }
      });
      resizeObserver.observe(canvas);
      return () => resizeObserver.disconnect();
    }, [isRecording, drawWaveform]);
    react.useEffect(() => {
      if (isRecording && !isPaused) {
        const draw = () => {
          drawWaveform();
          animationRef.current = requestAnimationFrame(draw);
        };
        draw();
        return () => {
          if (animationRef.current) {
            cancelAnimationFrame(animationRef.current);
            animationRef.current = null;
          }
        };
      }
      drawWaveform();
    }, [isRecording, isPaused, drawWaveform]);
    return /* @__PURE__ */ jsxRuntime.jsx(
      "canvas",
      {
        ref: canvasRef,
        className,
        style: {
          width: "100%",
          height: "100%",
          ...style
        },
        "aria-hidden": "true",
        tabIndex: -1,
        ...props
      }
    );
  }
);
exports.LiveStreamingStackRecorder = LiveStreamingStackRecorder;
package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.js
@@ -0,0 +1,126 @@
import { jsx } from "react/jsx-runtime";
import { forwardRef, useRef, useEffect, useCallback } from "react";
import { DEFAULT_WAVEFORM_APPEARANCE } from "../../../constants.js";
import { useRecordingAmplitudes } from "../use-recording-amplitudes.js";
const LiveStreamingStackRecorder = forwardRef(
  function LiveStreamingStackRecorder2({ mediaRecorder, fftSize, smoothingTimeConstant, sampleInterval, appearance, className = "", style, ...props }, ref) {
    const { amplitudes, isRecording, isPaused } = useRecordingAmplitudes({
      mediaRecorder,
      fftSize,
      smoothingTimeConstant,
      sampleInterval
    });
    const canvasRef = useRef(null);
    const animationRef = useRef(null);
    const sizeRef = useRef({ width: 0, height: 0 });
    useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(canvasRef.current);
        } else {
          ref.current = canvasRef.current;
        }
      }
    }, [ref]);
    const drawWaveform = useCallback(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const containerWidth = sizeRef.current.width;
      const containerHeight = sizeRef.current.height;
      if (containerWidth === 0 || containerHeight === 0) return;
      const barColor = appearance?.barColor ?? DEFAULT_WAVEFORM_APPEARANCE.barColor;
      const barWidth = appearance?.barWidth ?? DEFAULT_WAVEFORM_APPEARANCE.barWidth;
      const barGap = appearance?.barGap ?? DEFAULT_WAVEFORM_APPEARANCE.barGap;
      const barRadius = appearance?.barRadius ?? DEFAULT_WAVEFORM_APPEARANCE.barRadius;
      const barHeightScale = appearance?.barHeightScale ?? DEFAULT_WAVEFORM_APPEARANCE.barHeightScale;
      const totalBarWidth = barWidth + barGap;
      if (isRecording || amplitudes.length > 0) {
        const canvasWidth = containerWidth;
        ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
        ctx.clearRect(0, 0, canvasWidth, containerHeight);
        ctx.fillStyle = barColor;
        const minBarHeight = 2;
        const maxBarsCount = Math.floor(canvasWidth / totalBarWidth);
        const barsCount = Math.min(amplitudes.length, maxBarsCount);
        const needsDownsample = amplitudes.length > maxBarsCount;
        ctx.beginPath();
        for (let i = 0; i < barsCount; i++) {
          let amplitude;
          if (needsDownsample) {
            const startIdx = Math.floor(i * amplitudes.length / barsCount);
            const endIdx = Math.floor((i + 1) * amplitudes.length / barsCount);
            let maxAmplitude = 0;
            for (let j = startIdx; j < endIdx; j++) {
              maxAmplitude = Math.max(maxAmplitude, amplitudes[j] || 0);
            }
            amplitude = maxAmplitude;
          } else {
            amplitude = amplitudes[i] || 0;
          }
          const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
          const x = Math.round(i * totalBarWidth);
          const y = Math.round((containerHeight - barHeight) / 2);
          const roundedBarHeight = Math.round(barHeight);
          ctx.roundRect(x, y, barWidth, roundedBarHeight, barRadius);
        }
        ctx.fill();
      }
    }, [amplitudes, isRecording, appearance]);
    useEffect(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const resizeObserver = new ResizeObserver((entries) => {
        const entry = entries[0];
        if (!entry) return;
        const { width, height } = entry.contentRect;
        if (sizeRef.current.width === width && sizeRef.current.height === height) return;
        sizeRef.current = { width, height };
        const dpr = window.devicePixelRatio || 1;
        canvas.width = width * dpr;
        canvas.height = height * dpr;
        if (!isRecording) {
          drawWaveform();
        }
      });
      resizeObserver.observe(canvas);
      return () => resizeObserver.disconnect();
    }, [isRecording, drawWaveform]);
    useEffect(() => {
      if (isRecording && !isPaused) {
        const draw = () => {
          drawWaveform();
          animationRef.current = requestAnimationFrame(draw);
        };
        draw();
        return () => {
          if (animationRef.current) {
            cancelAnimationFrame(animationRef.current);
            animationRef.current = null;
          }
        };
      }
      drawWaveform();
    }, [isRecording, isPaused, drawWaveform]);
    return /* @__PURE__ */ jsx(
      "canvas",
      {
        ref: canvasRef,
        className,
        style: {
          width: "100%",
          height: "100%",
          ...style
        },
        "aria-hidden": "true",
        tabIndex: -1,
        ...props
      }
    );
  }
);
export {
  LiveStreamingStackRecorder
};
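Unlike the scrolling Canvas above, LiveStreamingStackRecorder is a single fixed-size canvas: when the amplitude history outgrows the available width, the needsDownsample branch max-pools each bucket of samples into one bar so the whole recording always fits. A usage sketch (hypothetical app code, again assuming the package root re-exports the component):

import { LiveStreamingStackRecorder } from "react-audio-wavekit";

function StackDemo({ recorder }) {
  return (
    <LiveStreamingStackRecorder
      mediaRecorder={recorder}
      fftSize={2048}
      smoothingTimeConstant={0.4}
      sampleInterval={50}
      appearance={{ barColor: "#10b981", barWidth: 2, barGap: 1 }}
      style={{ height: 48 }}
    />
  );
}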
package/dist/recorder/live-streaming/use-recording-amplitudes.cjs
@@ -0,0 +1,92 @@
"use strict";
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
const react = require("react");
const useAudioAnalyser = require("../use-audio-analyser.cjs");
function useRecordingAmplitudes(options) {
  const { mediaRecorder, fftSize = 2048, smoothingTimeConstant = 0.4, sampleInterval = 50 } = options;
  const amplitudeDataRef = react.useRef([]);
  const listenersRef = react.useRef(/* @__PURE__ */ new Set());
  const samplingIntervalRef = react.useRef(null);
  const subscribe = react.useCallback((onStoreChange) => {
    listenersRef.current.add(onStoreChange);
    return () => listenersRef.current.delete(onStoreChange);
  }, []);
  const getSnapshot = react.useCallback(() => amplitudeDataRef.current, []);
  const notifyListeners = react.useCallback(() => {
    for (const listener of listenersRef.current) {
      listener();
    }
  }, []);
  const amplitudes = react.useSyncExternalStore(subscribe, getSnapshot, getSnapshot);
  const { audioContextRef, analyserRef, dataArrayRef, bufferLengthRef } = useAudioAnalyser.useAudioAnalyser({
    mediaRecorder,
    fftSize,
    smoothingTimeConstant
  });
  const clearAmplitudes = react.useCallback(() => {
    amplitudeDataRef.current = [];
    notifyListeners();
  }, [notifyListeners]);
  const prevMediaRecorderRef = react.useRef(null);
  react.useEffect(() => {
    if (mediaRecorder !== prevMediaRecorderRef.current) {
      amplitudeDataRef.current = [];
      notifyListeners();
      prevMediaRecorderRef.current = mediaRecorder;
    }
  }, [mediaRecorder, notifyListeners]);
  react.useEffect(() => {
    if (!mediaRecorder) {
      return;
    }
    const sampleAmplitude = () => {
      const analyser = analyserRef.current;
      const dataArray = dataArrayRef.current;
      const bufferLength = bufferLengthRef.current;
      if (!analyser || !dataArray) return;
      analyser.getByteTimeDomainData(dataArray);
      let sum = 0;
      for (let i = 0; i < bufferLength; i++) {
        const normalized = (dataArray[i] - 128) / 128;
        sum += normalized * normalized;
      }
      const rms = Math.sqrt(sum / bufferLength);
      const amplitude = Math.min(1, rms * 2);
      amplitudeDataRef.current.push(amplitude);
      notifyListeners();
    };
    const startSampling = () => {
      if (!samplingIntervalRef.current) {
        samplingIntervalRef.current = window.setInterval(sampleAmplitude, sampleInterval);
      }
    };
    const stopSampling = () => {
      if (samplingIntervalRef.current) {
        clearInterval(samplingIntervalRef.current);
        samplingIntervalRef.current = null;
      }
    };
    const handlePause = () => stopSampling();
    const handleResume = () => startSampling();
    mediaRecorder.addEventListener("pause", handlePause);
    mediaRecorder.addEventListener("resume", handleResume);
    const timeoutId = setTimeout(() => {
      startSampling();
    }, 50);
    return () => {
      clearTimeout(timeoutId);
      mediaRecorder.removeEventListener("pause", handlePause);
      mediaRecorder.removeEventListener("resume", handleResume);
      stopSampling();
    };
  }, [mediaRecorder, sampleInterval, analyserRef, dataArrayRef, bufferLengthRef, notifyListeners]);
  return {
    amplitudes,
    audioContext: audioContextRef.current,
    analyser: analyserRef.current,
    isRecording: mediaRecorder?.state === "recording",
    isPaused: mediaRecorder?.state === "paused",
    clearAmplitudes
  };
}
exports.useRecordingAmplitudes = useRecordingAmplitudes;
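The sampling loop in use-recording-amplitudes reduces each time-domain buffer to one loudness value: bytes are normalized from [0, 255] to [-1, 1] (128 is silence), squared and averaged for the RMS, then doubled and clamped, i.e. amplitude = min(1, 2 * sqrt(mean(((b - 128) / 128)^2))). The hook can also be consumed directly; a sketch (hypothetical app code, assuming useRecordingAmplitudes is re-exported from the package root, otherwise import it from the dist path):

import { useRecordingAmplitudes } from "react-audio-wavekit";

function AmplitudeMeter({ recorder }) {
  const { amplitudes, isRecording, clearAmplitudes } = useRecordingAmplitudes({
    mediaRecorder: recorder,
    sampleInterval: 100, // push one RMS sample every 100 ms
  });
  // The newest sample, in [0, 1].
  const latest = amplitudes[amplitudes.length - 1] ?? 0;
  return (
    <div>
      <progress value={latest} max={1} />
      {isRecording && <button onClick={clearAmplitudes}>Reset</button>}
    </div>
  );
}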