react-audio-wavekit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +116 -0
- package/README.md +231 -0
- package/dist/constants.cjs +20 -0
- package/dist/constants.js +20 -0
- package/dist/index.cjs +12 -0
- package/dist/index.d.ts +235 -0
- package/dist/index.js +12 -0
- package/dist/recorder/live-recorder/index.cjs +125 -0
- package/dist/recorder/live-recorder/index.js +125 -0
- package/dist/recorder/live-streaming/recorder/recorder-compound.cjs +244 -0
- package/dist/recorder/live-streaming/recorder/recorder-compound.js +244 -0
- package/dist/recorder/live-streaming/recorder/recorder-context.cjs +20 -0
- package/dist/recorder/live-streaming/recorder/recorder-context.js +20 -0
- package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.cjs +126 -0
- package/dist/recorder/live-streaming/stack-recorder/stack-recorder-compound.js +126 -0
- package/dist/recorder/live-streaming/use-recording-amplitudes.cjs +92 -0
- package/dist/recorder/live-streaming/use-recording-amplitudes.js +92 -0
- package/dist/recorder/use-audio-analyser.cjs +59 -0
- package/dist/recorder/use-audio-analyser.js +59 -0
- package/dist/recorder/use-audio-recorder.cjs +139 -0
- package/dist/recorder/use-audio-recorder.js +139 -0
- package/dist/recorder/util-mime-type.cjs +15 -0
- package/dist/recorder/util-mime-type.js +15 -0
- package/dist/waveform/index.cjs +73 -0
- package/dist/waveform/index.js +73 -0
- package/dist/waveform/util-audio-decoder.cjs +45 -0
- package/dist/waveform/util-audio-decoder.js +45 -0
- package/dist/waveform/util-suspense.cjs +24 -0
- package/dist/waveform/util-suspense.js +24 -0
- package/dist/waveform/waveform-renderer.cjs +105 -0
- package/dist/waveform/waveform-renderer.js +105 -0
- package/package.json +74 -0
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
|
3
|
+
const jsxRuntime = require("react/jsx-runtime");
|
|
4
|
+
const react = require("react");
|
|
5
|
+
const useAudioAnalyser = require("../use-audio-analyser.cjs");
|
|
6
|
+
/**
 * <LiveRecorder> — canvas-based real-time bar visualizer for a recording
 * MediaRecorder audio stream.
 *
 * Props:
 * - mediaRecorder: the active MediaRecorder to visualize; when absent and
 *   `showIdleState` is true, a flat row of minimum-height bars is drawn.
 * - className: extra class names appended after the built-in `text-inherit`.
 * - appearance: optional { barWidth, barGap, barRadius, barHeightScale, barColor }.
 * - fftSize / smoothingTimeConstant: forwarded to the underlying AnalyserNode.
 * - showIdleState: whether to draw the idle placeholder bars (default true).
 *
 * The forwarded ref exposes an imperative handle with getCanvas(),
 * getAudioContext() and getAnalyser().
 */
const LiveRecorder = react.forwardRef(
  ({
    mediaRecorder,
    className = "",
    appearance,
    fftSize = 2048,
    smoothingTimeConstant = 0.8,
    showIdleState = true,
    ...props
  }, ref) => {
    const canvasRef = react.useRef(null);
    const animationRef = react.useRef(null);
    const { audioContextRef, analyserRef, dataArrayRef, bufferLengthRef } = useAudioAnalyser.useAudioAnalyser({
      mediaRecorder,
      fftSize,
      smoothingTimeConstant
    });
    react.useImperativeHandle(ref, () => ({
      getCanvas: () => canvasRef.current,
      getAudioContext: () => audioContextRef.current,
      getAnalyser: () => analyserRef.current
    }));
    // Live-drawing loop: samples time-domain data each animation frame and
    // renders vertically centered rounded bars across the canvas width.
    react.useEffect(() => {
      if (!mediaRecorder || !canvasRef.current) {
        return;
      }
      const canvas = canvasRef.current;
      const barWidth = appearance?.barWidth ?? 3;
      const barGap = appearance?.barGap ?? 1;
      const barRadius = appearance?.barRadius ?? 1.5;
      const barHeightScale = appearance?.barHeightScale ?? 0.95;
      // Inherit the CSS `color` so bars follow the surrounding text color.
      const barColor = appearance?.barColor ?? getComputedStyle(canvas).color ?? "#3b82f6";
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      let isPaused = false;
      const draw = () => {
        if (isPaused) {
          // Keep the loop alive while paused so drawing resumes instantly.
          animationRef.current = requestAnimationFrame(draw);
          return;
        }
        const analyser = analyserRef.current;
        const dataArray = dataArrayRef.current;
        const bufferLength = bufferLengthRef.current;
        if (!analyser || !dataArray || !bufferLength) {
          // FIX: the analyser is created asynchronously by useAudioAnalyser.
          // A bare `return` here used to terminate the loop for good when the
          // analyser was not ready within the initial 50 ms timeout, leaving
          // the canvas permanently blank. Retry on the next frame instead.
          animationRef.current = requestAnimationFrame(draw);
          return;
        }
        // Resize the backing store every frame (this resets the context
        // transform, which is why scale() is reapplied right after) so the
        // canvas tracks CSS-size changes while recording.
        const { width, height } = canvas.getBoundingClientRect();
        canvas.width = width * dpr;
        canvas.height = height * dpr;
        ctx.scale(dpr, dpr);
        analyser.getByteTimeDomainData(dataArray);
        ctx.clearRect(0, 0, width, height);
        const totalBarWidth = barWidth + barGap;
        const numBars = Math.floor((width + barGap) / totalBarWidth);
        ctx.fillStyle = barColor;
        for (let i = 0; i < numBars; i++) {
          const dataIndex = Math.floor(i / numBars * bufferLength);
          const value = dataArray[dataIndex] || 0;
          // Byte time-domain samples are centered on 128; normalize to 0..1.
          const amplitude = Math.abs(value - 128) / 128;
          const barHeight = Math.max(2, amplitude * height * barHeightScale);
          const x = i * totalBarWidth;
          const y = (height - barHeight) / 2;
          ctx.beginPath();
          ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          ctx.fill();
        }
        animationRef.current = requestAnimationFrame(draw);
      };
      const handlePause = () => {
        isPaused = true;
      };
      const handleResume = () => {
        isPaused = false;
      };
      mediaRecorder.addEventListener("pause", handlePause);
      mediaRecorder.addEventListener("resume", handleResume);
      // Short delay gives the audio graph a head start before the first draw;
      // the retry inside draw() now covers slower setups as well.
      const timeoutId = setTimeout(() => {
        draw();
      }, 50);
      return () => {
        clearTimeout(timeoutId);
        mediaRecorder.removeEventListener("pause", handlePause);
        mediaRecorder.removeEventListener("resume", handleResume);
        if (animationRef.current) {
          cancelAnimationFrame(animationRef.current);
          animationRef.current = null;
        }
      };
    }, [mediaRecorder, appearance, analyserRef, dataArrayRef, bufferLengthRef]);
    // Idle placeholder: one static row of minimum-height bars drawn once when
    // no recorder is attached.
    react.useEffect(() => {
      if (mediaRecorder || !showIdleState || !canvasRef.current) return;
      const canvas = canvasRef.current;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const barWidth = appearance?.barWidth ?? 3;
      const barGap = appearance?.barGap ?? 1;
      const barRadius = appearance?.barRadius ?? 1.5;
      const barColor = appearance?.barColor ?? getComputedStyle(canvas).color ?? "#3b82f6";
      const { width, height } = canvas.getBoundingClientRect();
      canvas.width = width * dpr;
      canvas.height = height * dpr;
      ctx.scale(dpr, dpr);
      ctx.clearRect(0, 0, width, height);
      ctx.fillStyle = barColor;
      const minBarHeight = 2;
      const totalBarWidth = barWidth + barGap;
      const barCount = Math.floor((width + barGap) / totalBarWidth);
      for (let i = 0; i < barCount; i++) {
        const x = i * totalBarWidth;
        const y = (height - minBarHeight) / 2;
        ctx.beginPath();
        ctx.roundRect(x, y, barWidth, minBarHeight, barRadius);
        ctx.fill();
      }
    }, [mediaRecorder, appearance, showIdleState]);
    return /* @__PURE__ */ jsxRuntime.jsx("canvas", { ref: canvasRef, className: `text-inherit ${className}`, "aria-hidden": "true", tabIndex: -1, ...props });
  }
);
|
|
124
|
+
// Explicit display name for React DevTools (forwardRef components otherwise
// show up as "Anonymous").
LiveRecorder.displayName = "LiveRecorder";
exports.LiveRecorder = LiveRecorder;
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import { jsx } from "react/jsx-runtime";
|
|
2
|
+
import { forwardRef, useRef, useImperativeHandle, useEffect } from "react";
|
|
3
|
+
import { useAudioAnalyser } from "../use-audio-analyser.js";
|
|
4
|
+
/**
 * <LiveRecorder> — canvas-based real-time bar visualizer for a recording
 * MediaRecorder audio stream (ESM build; mirrors the CJS bundle).
 *
 * Props:
 * - mediaRecorder: the active MediaRecorder to visualize; when absent and
 *   `showIdleState` is true, a flat row of minimum-height bars is drawn.
 * - className: extra class names appended after the built-in `text-inherit`.
 * - appearance: optional { barWidth, barGap, barRadius, barHeightScale, barColor }.
 * - fftSize / smoothingTimeConstant: forwarded to the underlying AnalyserNode.
 * - showIdleState: whether to draw the idle placeholder bars (default true).
 *
 * The forwarded ref exposes an imperative handle with getCanvas(),
 * getAudioContext() and getAnalyser().
 */
const LiveRecorder = forwardRef(
  ({
    mediaRecorder,
    className = "",
    appearance,
    fftSize = 2048,
    smoothingTimeConstant = 0.8,
    showIdleState = true,
    ...props
  }, ref) => {
    const canvasRef = useRef(null);
    const animationRef = useRef(null);
    const { audioContextRef, analyserRef, dataArrayRef, bufferLengthRef } = useAudioAnalyser({
      mediaRecorder,
      fftSize,
      smoothingTimeConstant
    });
    useImperativeHandle(ref, () => ({
      getCanvas: () => canvasRef.current,
      getAudioContext: () => audioContextRef.current,
      getAnalyser: () => analyserRef.current
    }));
    // Live-drawing loop: samples time-domain data each animation frame and
    // renders vertically centered rounded bars across the canvas width.
    useEffect(() => {
      if (!mediaRecorder || !canvasRef.current) {
        return;
      }
      const canvas = canvasRef.current;
      const barWidth = appearance?.barWidth ?? 3;
      const barGap = appearance?.barGap ?? 1;
      const barRadius = appearance?.barRadius ?? 1.5;
      const barHeightScale = appearance?.barHeightScale ?? 0.95;
      // Inherit the CSS `color` so bars follow the surrounding text color.
      const barColor = appearance?.barColor ?? getComputedStyle(canvas).color ?? "#3b82f6";
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      let isPaused = false;
      const draw = () => {
        if (isPaused) {
          // Keep the loop alive while paused so drawing resumes instantly.
          animationRef.current = requestAnimationFrame(draw);
          return;
        }
        const analyser = analyserRef.current;
        const dataArray = dataArrayRef.current;
        const bufferLength = bufferLengthRef.current;
        if (!analyser || !dataArray || !bufferLength) {
          // FIX: the analyser is created asynchronously by useAudioAnalyser.
          // A bare `return` here used to terminate the loop for good when the
          // analyser was not ready within the initial 50 ms timeout, leaving
          // the canvas permanently blank. Retry on the next frame instead.
          animationRef.current = requestAnimationFrame(draw);
          return;
        }
        // Resize the backing store every frame (this resets the context
        // transform, which is why scale() is reapplied right after) so the
        // canvas tracks CSS-size changes while recording.
        const { width, height } = canvas.getBoundingClientRect();
        canvas.width = width * dpr;
        canvas.height = height * dpr;
        ctx.scale(dpr, dpr);
        analyser.getByteTimeDomainData(dataArray);
        ctx.clearRect(0, 0, width, height);
        const totalBarWidth = barWidth + barGap;
        const numBars = Math.floor((width + barGap) / totalBarWidth);
        ctx.fillStyle = barColor;
        for (let i = 0; i < numBars; i++) {
          const dataIndex = Math.floor(i / numBars * bufferLength);
          const value = dataArray[dataIndex] || 0;
          // Byte time-domain samples are centered on 128; normalize to 0..1.
          const amplitude = Math.abs(value - 128) / 128;
          const barHeight = Math.max(2, amplitude * height * barHeightScale);
          const x = i * totalBarWidth;
          const y = (height - barHeight) / 2;
          ctx.beginPath();
          ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          ctx.fill();
        }
        animationRef.current = requestAnimationFrame(draw);
      };
      const handlePause = () => {
        isPaused = true;
      };
      const handleResume = () => {
        isPaused = false;
      };
      mediaRecorder.addEventListener("pause", handlePause);
      mediaRecorder.addEventListener("resume", handleResume);
      // Short delay gives the audio graph a head start before the first draw;
      // the retry inside draw() now covers slower setups as well.
      const timeoutId = setTimeout(() => {
        draw();
      }, 50);
      return () => {
        clearTimeout(timeoutId);
        mediaRecorder.removeEventListener("pause", handlePause);
        mediaRecorder.removeEventListener("resume", handleResume);
        if (animationRef.current) {
          cancelAnimationFrame(animationRef.current);
          animationRef.current = null;
        }
      };
    }, [mediaRecorder, appearance, analyserRef, dataArrayRef, bufferLengthRef]);
    // Idle placeholder: one static row of minimum-height bars drawn once when
    // no recorder is attached.
    useEffect(() => {
      if (mediaRecorder || !showIdleState || !canvasRef.current) return;
      const canvas = canvasRef.current;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const barWidth = appearance?.barWidth ?? 3;
      const barGap = appearance?.barGap ?? 1;
      const barRadius = appearance?.barRadius ?? 1.5;
      const barColor = appearance?.barColor ?? getComputedStyle(canvas).color ?? "#3b82f6";
      const { width, height } = canvas.getBoundingClientRect();
      canvas.width = width * dpr;
      canvas.height = height * dpr;
      ctx.scale(dpr, dpr);
      ctx.clearRect(0, 0, width, height);
      ctx.fillStyle = barColor;
      const minBarHeight = 2;
      const totalBarWidth = barWidth + barGap;
      const barCount = Math.floor((width + barGap) / totalBarWidth);
      for (let i = 0; i < barCount; i++) {
        const x = i * totalBarWidth;
        const y = (height - minBarHeight) / 2;
        ctx.beginPath();
        ctx.roundRect(x, y, barWidth, minBarHeight, barRadius);
        ctx.fill();
      }
    }, [mediaRecorder, appearance, showIdleState]);
    return /* @__PURE__ */ jsx("canvas", { ref: canvasRef, className: `text-inherit ${className}`, "aria-hidden": "true", tabIndex: -1, ...props });
  }
);
|
|
122
|
+
// Explicit display name for React DevTools (forwardRef components otherwise
// show up as "Anonymous").
LiveRecorder.displayName = "LiveRecorder";
export {
  LiveRecorder
};
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
|
3
|
+
const jsxRuntime = require("react/jsx-runtime");
|
|
4
|
+
const overlayscrollbarsReact = require("overlayscrollbars-react");
|
|
5
|
+
const react = require("react");
|
|
6
|
+
const constants = require("../../../constants.cjs");
|
|
7
|
+
const recorderContext = require("./recorder-context.cjs");
|
|
8
|
+
// Root of the live-streaming recorder compound component. Wraps children in
// an OverlayScrollbars-managed horizontal scroll area (vertical overflow
// hidden) and supplies recorder configuration to descendants via
// LiveStreamingRecorderProvider context.
const LiveStreamingRecorderRoot = react.forwardRef(
  function LiveStreamingRecorderRoot2({
    children,
    className = "",
    style,
    mediaRecorder,
    fftSize,
    smoothingTimeConstant,
    sampleInterval,
    appearance,
    ...props
  }, ref) {
    const containerRef = react.useRef(null);
    // useId() may contain ":" which is invalid inside a CSS class name.
    const uniqueId = react.useId().replace(/:/g, "");
    const themeClassName = `os-theme-lsr-${uniqueId}`;
    const scrollbar = appearance?.scrollbar;
    const thumbColor = scrollbar?.thumbColor ?? constants.DEFAULT_SCROLLBAR_APPEARANCE.thumbColor;
    const hidden = scrollbar?.hidden ?? constants.DEFAULT_SCROLLBAR_APPEARANCE.hidden;
    const [initializeOS, osInstance] = overlayscrollbarsReact.useOverlayScrollbars({
      options: {
        overflow: { x: "scroll", y: "hidden" },
        scrollbars: {
          theme: themeClassName,
          visibility: hidden ? "hidden" : "auto",
          autoHide: "leave",
          // Hide when the mouse leaves the area (the most common UX)
          autoHideDelay: 400,
          dragScroll: true,
          clickScroll: true
        }
      },
      // Defer initialization until explicitly invoked in the effect below.
      defer: true
    });
    // Attach OverlayScrollbars to the container once it has mounted.
    react.useEffect(() => {
      if (containerRef.current) {
        initializeOS(containerRef.current);
      }
    }, [initializeOS]);
    // Keep scrollbar visibility in sync when the `hidden` option changes
    // after the instance has been created.
    react.useEffect(() => {
      const instance = osInstance();
      if (instance) {
        instance.options({
          scrollbars: {
            visibility: hidden ? "hidden" : "auto"
          }
        });
      }
    }, [osInstance, hidden]);
    // Inject a per-instance <style> element defining the OverlayScrollbars
    // theme CSS variables (thumb color, sizes, radii). Any stale element with
    // the same id is removed first; the element is removed again on cleanup.
    react.useEffect(() => {
      const styleId = `lsr-os-theme-${uniqueId}`;
      document.getElementById(styleId)?.remove();
      const styleElement = document.createElement("style");
      styleElement.id = styleId;
      styleElement.textContent = `
      .${themeClassName} {
        --os-size: 8px;
        --os-padding-perpendicular: 2px;
        --os-padding-axis: 2px;
        --os-track-border-radius: 4px;
        --os-track-bg: transparent;
        --os-track-bg-hover: transparent;
        --os-handle-border-radius: 4px;
        --os-handle-bg: ${thumbColor};
        --os-handle-bg-hover: ${thumbColor};
        --os-handle-bg-active: ${thumbColor};
        --os-handle-min-size: 30px;
      }
    `;
      document.head.appendChild(styleElement);
      return () => {
        document.getElementById(styleId)?.remove();
      };
    }, [uniqueId, themeClassName, thumbColor]);
    // Forward the container DOM node to the caller-supplied ref (supports
    // both callback refs and ref objects).
    react.useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(containerRef.current);
        } else {
          ref.current = containerRef.current;
        }
      }
    }, [ref]);
    return /* @__PURE__ */ jsxRuntime.jsx("div", { ref: containerRef, className, style, ...props, children: /* @__PURE__ */ jsxRuntime.jsx(
      recorderContext.LiveStreamingRecorderProvider,
      {
        mediaRecorder,
        fftSize,
        smoothingTimeConstant,
        sampleInterval,
        children
      }
    ) });
  }
);
|
|
102
|
+
// Canvas that renders the amplitude history collected by the provider as a
// bar waveform. In `growWidth` mode (default) the canvas widens as samples
// arrive — one bar per sample — and the scroll container is pinned to the
// right edge while recording; otherwise the full history is downsampled to
// fit the fixed container width.
const LiveStreamingRecorderCanvas = react.forwardRef(
  function LiveStreamingRecorderCanvas2({ className = "", style, appearance, growWidth = true, ...props }, ref) {
    const { amplitudes, isRecording, isPaused } = recorderContext.useLiveStreamingRecorderContext();
    const canvasRef = react.useRef(null);
    const animationRef = react.useRef(null);
    const containerSizeRef = react.useRef({ width: 0, height: 0 });
    const containerRef = react.useRef(null);
    // Canvas width only grows during a session (avoids jitter while samples
    // arrive); reset below when a new session begins.
    const prevCanvasWidthRef = react.useRef(0);
    // Forward the canvas DOM node to the caller-supplied ref (supports both
    // callback refs and ref objects).
    react.useEffect(() => {
      if (ref) {
        if (typeof ref === "function") {
          ref(canvasRef.current);
        } else {
          ref.current = canvasRef.current;
        }
      }
    }, [ref]);
    // An empty amplitude buffer marks a fresh session: reset the monotonic
    // width tracking.
    react.useEffect(() => {
      if (amplitudes.length === 0) {
        prevCanvasWidthRef.current = 0;
      }
    }, [amplitudes.length]);
    const drawWaveform = react.useCallback(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const ctx = canvas.getContext("2d");
      if (!ctx) return;
      const dpr = window.devicePixelRatio || 1;
      const container = canvas.parentElement;
      const containerWidth = container?.clientWidth || canvas.clientWidth;
      const containerHeight = container?.clientHeight || canvas.clientHeight;
      const barColor = appearance?.barColor ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barColor;
      const barWidth = appearance?.barWidth ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barWidth;
      const barGap = appearance?.barGap ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barGap;
      const barRadius = appearance?.barRadius ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barRadius;
      const barHeightScale = appearance?.barHeightScale ?? constants.DEFAULT_WAVEFORM_APPEARANCE.barHeightScale;
      const totalBarWidth = barWidth + barGap;
      if (isRecording || amplitudes.length > 0) {
        let canvasWidth;
        if (growWidth) {
          // One bar per sample; never shrink below the widest size reached
          // so far this session.
          const requiredWidth = amplitudes.length * totalBarWidth;
          const calculatedWidth = amplitudes.length > 0 ? requiredWidth : containerWidth;
          canvasWidth = Math.max(calculatedWidth, prevCanvasWidthRef.current);
          prevCanvasWidthRef.current = canvasWidth;
          canvas.style.width = `${canvasWidth}px`;
        } else {
          canvasWidth = containerWidth;
          canvas.style.width = "100%";
        }
        // Resizing the backing store resets the context transform, so the
        // dpr scale must be reapplied on every draw.
        canvas.width = canvasWidth * dpr;
        canvas.height = containerHeight * dpr;
        ctx.scale(dpr, dpr);
        ctx.clearRect(0, 0, canvasWidth, containerHeight);
        ctx.fillStyle = barColor;
        const minBarHeight = 2;
        // All bars are accumulated into one path and filled once below.
        ctx.beginPath();
        if (growWidth) {
          for (let i = 0; i < amplitudes.length; i++) {
            const amplitude = amplitudes[i];
            const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
            const x = i * totalBarWidth;
            const y = (containerHeight - barHeight) / 2;
            ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          }
        } else {
          // Fixed width: downsample the history so it always fits.
          const barsCount = Math.floor(canvasWidth / totalBarWidth);
          const step = amplitudes.length / barsCount;
          for (let i = 0; i < barsCount; i++) {
            const amplitudeIndex = Math.min(Math.floor(i * step), amplitudes.length - 1);
            const amplitude = amplitudes[amplitudeIndex] || 0;
            const barHeight = Math.max(minBarHeight, amplitude * containerHeight * barHeightScale);
            const x = i * totalBarWidth;
            const y = (containerHeight - barHeight) / 2;
            ctx.roundRect(x, y, barWidth, barHeight, barRadius);
          }
        }
        ctx.fill();
      }
    }, [amplitudes, isRecording, appearance, growWidth]);
    // Locate the scrollable ancestor (the OverlayScrollbars viewport when
    // present, otherwise the direct parent) and redraw on size changes while
    // not recording (the rAF loop below handles redraws during recording).
    react.useEffect(() => {
      const canvas = canvasRef.current;
      if (!canvas) return;
      const osContent = canvas.parentElement;
      const osViewport = osContent?.parentElement;
      if (osViewport?.classList.contains("os-viewport")) {
        containerRef.current = osViewport;
      } else {
        containerRef.current = canvas.parentElement;
      }
      const resizeObserver = new ResizeObserver((entries) => {
        const entry = entries[0];
        if (!entry) return;
        const { width, height } = entry.contentRect;
        containerSizeRef.current = { width, height };
        if (!isRecording) {
          drawWaveform();
        }
      });
      resizeObserver.observe(canvas);
      return () => resizeObserver.disconnect();
    }, [isRecording, drawWaveform]);
    // While actively recording (and not paused): redraw every animation
    // frame and keep the view scrolled to the newest samples. Otherwise draw
    // once for the current (static) state.
    react.useEffect(() => {
      if (isRecording && !isPaused) {
        const draw = () => {
          drawWaveform();
          if (growWidth && containerRef.current) {
            containerRef.current.scrollLeft = containerRef.current.scrollWidth;
          }
          animationRef.current = requestAnimationFrame(draw);
        };
        draw();
        return () => {
          if (animationRef.current) {
            cancelAnimationFrame(animationRef.current);
            animationRef.current = null;
          }
        };
      }
      drawWaveform();
    }, [isRecording, isPaused, drawWaveform, growWidth]);
    return /* @__PURE__ */ jsxRuntime.jsx(
      "canvas",
      {
        ref: canvasRef,
        className,
        style: {
          // Set to block in growWidth mode to allow self-determined width
          display: growWidth ? "block" : void 0,
          height: "100%",
          ...style
        },
        "aria-hidden": "true",
        tabIndex: -1,
        ...props
      }
    );
  }
);
|
|
240
|
+
// Compound-component export: <LiveStreamingRecorder> is itself the Root, with
// the subcomponents attached as static properties (Radix-style API), so both
// <LiveStreamingRecorder> and <LiveStreamingRecorder.Root> work.
const LiveStreamingRecorder = Object.assign(LiveStreamingRecorderRoot, {
  Root: LiveStreamingRecorderRoot,
  Canvas: LiveStreamingRecorderCanvas
});
exports.LiveStreamingRecorder = LiveStreamingRecorder;
|