@kernl-sdk/react 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/dist/components/live-waveform.d.ts +32 -0
- package/dist/components/live-waveform.d.ts.map +1 -0
- package/dist/components/live-waveform.js +422 -0
- package/dist/hooks/use-browser-audio.d.ts +34 -0
- package/dist/hooks/use-browser-audio.d.ts.map +1 -0
- package/dist/hooks/use-browser-audio.js +34 -0
- package/dist/hooks/use-realtime.d.ts +85 -0
- package/dist/hooks/use-realtime.d.ts.map +1 -0
- package/dist/hooks/use-realtime.js +78 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/lib/audio-capture-worklet.d.ts +12 -0
- package/dist/lib/audio-capture-worklet.d.ts.map +1 -0
- package/dist/lib/audio-capture-worklet.js +80 -0
- package/dist/lib/browser-channel.d.ts +45 -0
- package/dist/lib/browser-channel.d.ts.map +1 -0
- package/dist/lib/browser-channel.js +144 -0
- package/dist/use-realtime.d.ts +68 -0
- package/dist/use-realtime.d.ts.map +1 -0
- package/dist/use-realtime.js +60 -0
- package/package.json +54 -0
- package/src/components/live-waveform.tsx +601 -0
- package/src/hooks/use-browser-audio.ts +50 -0
- package/src/hooks/use-realtime.ts +173 -0
- package/src/index.ts +17 -0
- package/src/lib/audio-capture-worklet.ts +82 -0
- package/src/lib/browser-channel.ts +178 -0
- package/tsconfig.json +15 -0
package/dist/components/live-waveform.d.ts
@@ -0,0 +1,32 @@
+import { type HTMLAttributes } from "react";
+/**
+ * Audio source interface for LiveWaveform visualization.
+ */
+export interface AudioSource {
+    /** Analyser for speaker output (model audio). */
+    readonly output: AnalyserNode | null;
+    /** Analyser for mic input (user audio). */
+    readonly input: AnalyserNode | null;
+}
+export type LiveWaveformProps = HTMLAttributes<HTMLDivElement> & {
+    active?: boolean;
+    processing?: boolean;
+    /**
+     * Audio source for visualization (e.g., BrowserChannel).
+     */
+    audio?: AudioSource | null;
+    barWidth?: number;
+    barHeight?: number;
+    barGap?: number;
+    barRadius?: number;
+    barColor?: string;
+    fadeEdges?: boolean;
+    fadeWidth?: number;
+    height?: string | number;
+    sensitivity?: number;
+    historySize?: number;
+    updateRate?: number;
+    mode?: "scrolling" | "static";
+};
+export declare function LiveWaveform({ active, processing, audio, barWidth, barGap, barRadius, barColor, fadeEdges, fadeWidth, barHeight: baseBarHeight, height, sensitivity, historySize, updateRate, mode, className, style, ...props }: LiveWaveformProps): import("react/jsx-runtime").JSX.Element;
+//# sourceMappingURL=live-waveform.d.ts.map

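Taken together, `AudioSource` and `LiveWaveformProps` imply usage along the following lines. This is a minimal sketch, not verbatim package docs: it assumes the package root re-exports `LiveWaveform`, and the component/prop values shown are illustrative (the defaults come from the implementation further down: `barWidth = 3`, `height = 64`, `mode = "static"`, etc.).

```tsx
import { LiveWaveform } from "@kernl-sdk/react"; // assumed export path

// `channel` can be anything satisfying AudioSource, e.g. the
// BrowserChannel created by useBrowserAudio() later in this diff.
export function CallVisualizer(props: {
    channel: { readonly output: AnalyserNode | null; readonly input: AnalyserNode | null } | null;
    connected: boolean;
}) {
    // `active` draws live bars; "static" centers them ("scrolling" is a ticker)
    return (
        <LiveWaveform
            active={props.connected}
            audio={props.channel}
            height={64}
            mode="static"
        />
    );
}
```
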
package/dist/components/live-waveform.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"live-waveform.d.ts","sourceRoot":"","sources":["../../src/components/live-waveform.tsx"],"names":[],"mappings":"AAEA,OAAO,EAAqB,KAAK,cAAc,EAAE,MAAM,OAAO,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,iDAAiD;IACjD,QAAQ,CAAC,MAAM,EAAE,YAAY,GAAG,IAAI,CAAC;IACrC,2CAA2C;IAC3C,QAAQ,CAAC,KAAK,EAAE,YAAY,GAAG,IAAI,CAAC;CACrC;AAED,MAAM,MAAM,iBAAiB,GAAG,cAAc,CAAC,cAAc,CAAC,GAAG;IAC/D,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,KAAK,CAAC,EAAE,WAAW,GAAG,IAAI,CAAC;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,WAAW,GAAG,QAAQ,CAAC;CAC/B,CAAC;AAEF,wBAAgB,YAAY,CAAC,EAC3B,MAAc,EACd,UAAkB,EAClB,KAAK,EACL,QAAY,EACZ,MAAU,EACV,SAAe,EACf,QAAQ,EACR,SAAgB,EAChB,SAAc,EACd,SAAS,EAAE,aAAiB,EAC5B,MAAW,EACX,WAAe,EACf,WAAgB,EAChB,UAAe,EACf,IAAe,EACf,SAAS,EACT,KAAK,EACL,GAAG,KAAK,EACT,EAAE,iBAAiB,2CAkiBnB"}

package/dist/components/live-waveform.js
@@ -0,0 +1,422 @@
+"use client";
+import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
+import { useEffect, useRef } from "react";
+export function LiveWaveform({ active = false, processing = false, audio, barWidth = 3, barGap = 1, barRadius = 1.5, barColor, fadeEdges = true, fadeWidth = 24, barHeight: baseBarHeight = 4, height = 64, sensitivity = 1, historySize = 60, updateRate = 30, mode = "static", className, style, ...props }) {
+    const canvasRef = useRef(null);
+    const containerRef = useRef(null);
+    const historyRef = useRef([]);
+    const lastUpdateRef = useRef(0);
+    const processingAnimationRef = useRef(null);
+    const lastActiveDataRef = useRef([]);
+    const transitionProgressRef = useRef(0);
+    const staticBarsRef = useRef([]);
+    const needsRedrawRef = useRef(true);
+    const gradientCacheRef = useRef(null);
+    const lastWidthRef = useRef(0);
+    // Listening state refs
+    const listeningBlendRef = useRef(0); // 0 = speaking, 1 = listening
+    const listeningTimeRef = useRef(0);
+    const silenceCountRef = useRef(0);
+    const heightStyle = typeof height === "number" ? `${height}px` : height;
+    // Handle canvas resizing
+    useEffect(() => {
+        const canvas = canvasRef.current;
+        const container = containerRef.current;
+        if (!canvas || !container)
+            return;
+        const resizeObserver = new ResizeObserver(() => {
+            const rect = container.getBoundingClientRect();
+            const dpr = window.devicePixelRatio || 1;
+            canvas.width = rect.width * dpr;
+            canvas.height = rect.height * dpr;
+            canvas.style.width = `${rect.width}px`;
+            canvas.style.height = `${rect.height}px`;
+            const ctx = canvas.getContext("2d");
+            if (ctx) {
+                ctx.scale(dpr, dpr);
+            }
+            gradientCacheRef.current = null;
+            lastWidthRef.current = rect.width;
+            needsRedrawRef.current = true;
+        });
+        resizeObserver.observe(container);
+        return () => resizeObserver.disconnect();
+    }, []);
+    useEffect(() => {
+        if (processing && !active) {
+            let time = 0;
+            transitionProgressRef.current = 0;
+            const animateProcessing = () => {
+                time += 0.03;
+                transitionProgressRef.current = Math.min(1, transitionProgressRef.current + 0.02);
+                const processingData = [];
+                const barCount = Math.floor((containerRef.current?.getBoundingClientRect().width || 200) /
+                    (barWidth + barGap));
+                if (mode === "static") {
+                    const halfCount = Math.floor(barCount / 2);
+                    for (let i = 0; i < barCount; i++) {
+                        const normalizedPosition = (i - halfCount) / halfCount;
+                        const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4;
+                        const wave1 = Math.sin(time * 1.5 + normalizedPosition * 3) * 0.25;
+                        const wave2 = Math.sin(time * 0.8 - normalizedPosition * 2) * 0.2;
+                        const wave3 = Math.cos(time * 2 + normalizedPosition) * 0.15;
+                        const combinedWave = wave1 + wave2 + wave3;
+                        const processingValue = (0.2 + combinedWave) * centerWeight;
+                        let finalValue = processingValue;
+                        if (lastActiveDataRef.current.length > 0 &&
+                            transitionProgressRef.current < 1) {
+                            const lastDataIndex = Math.min(i, lastActiveDataRef.current.length - 1);
+                            const lastValue = lastActiveDataRef.current[lastDataIndex] || 0;
+                            finalValue =
+                                lastValue * (1 - transitionProgressRef.current) +
+                                    processingValue * transitionProgressRef.current;
+                        }
+                        processingData.push(Math.max(0.05, Math.min(1, finalValue)));
+                    }
+                }
+                else {
+                    for (let i = 0; i < barCount; i++) {
+                        const normalizedPosition = (i - barCount / 2) / (barCount / 2);
+                        const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4;
+                        const wave1 = Math.sin(time * 1.5 + i * 0.15) * 0.25;
+                        const wave2 = Math.sin(time * 0.8 - i * 0.1) * 0.2;
+                        const wave3 = Math.cos(time * 2 + i * 0.05) * 0.15;
+                        const combinedWave = wave1 + wave2 + wave3;
+                        const processingValue = (0.2 + combinedWave) * centerWeight;
+                        let finalValue = processingValue;
+                        if (lastActiveDataRef.current.length > 0 &&
+                            transitionProgressRef.current < 1) {
+                            const lastDataIndex = Math.floor((i / barCount) * lastActiveDataRef.current.length);
+                            const lastValue = lastActiveDataRef.current[lastDataIndex] || 0;
+                            finalValue =
+                                lastValue * (1 - transitionProgressRef.current) +
+                                    processingValue * transitionProgressRef.current;
+                        }
+                        processingData.push(Math.max(0.05, Math.min(1, finalValue)));
+                    }
+                }
+                if (mode === "static") {
+                    staticBarsRef.current = processingData;
+                }
+                else {
+                    historyRef.current = processingData;
+                }
+                needsRedrawRef.current = true;
+                processingAnimationRef.current =
+                    requestAnimationFrame(animateProcessing);
+            };
+            animateProcessing();
+            return () => {
+                if (processingAnimationRef.current) {
+                    cancelAnimationFrame(processingAnimationRef.current);
+                }
+            };
+        }
+        else if (!active && !processing) {
+            // Reset listening state
+            listeningBlendRef.current = 0;
+            listeningTimeRef.current = 0;
+            silenceCountRef.current = 0;
+            const hasData = mode === "static"
+                ? staticBarsRef.current.length > 0
+                : historyRef.current.length > 0;
+            if (hasData) {
+                let fadeProgress = 0;
+                const fadeToIdle = () => {
+                    fadeProgress += 0.03;
+                    if (fadeProgress < 1) {
+                        if (mode === "static") {
+                            staticBarsRef.current = staticBarsRef.current.map((value) => value * (1 - fadeProgress));
+                        }
+                        else {
+                            historyRef.current = historyRef.current.map((value) => value * (1 - fadeProgress));
+                        }
+                        needsRedrawRef.current = true;
+                        requestAnimationFrame(fadeToIdle);
+                    }
+                    else {
+                        if (mode === "static") {
+                            staticBarsRef.current = [];
+                        }
+                        else {
+                            historyRef.current = [];
+                        }
+                    }
+                };
+                fadeToIdle();
+            }
+        }
+    }, [processing, active, barWidth, barGap, mode]);
+    // Animation loop
+    useEffect(() => {
+        const canvas = canvasRef.current;
+        if (!canvas)
+            return;
+        const ctx = canvas.getContext("2d");
+        if (!ctx)
+            return;
+        let rafId;
+        const animate = (currentTime) => {
+            // Read analysers directly each frame (properties populate after init)
+            const outputAnalyser = audio?.output;
+            const inputAnalyser = audio?.input;
+            // Render waveform
+            const rect = canvas.getBoundingClientRect();
+            // Update audio data if active
+            if (active && currentTime - lastUpdateRef.current > updateRate) {
+                lastUpdateRef.current = currentTime;
+                const barCount = Math.floor(rect.width / (barWidth + barGap));
+                const halfCount = Math.floor(barCount / 2);
+                // Threshold for detecting silence (agent not speaking)
+                const silenceThreshold = 0.08;
+                const silenceFramesRequired = 2;
+                const transitionSpeed = 0.18;
+                if (outputAnalyser) {
+                    const dataArray = new Uint8Array(outputAnalyser.frequencyBinCount);
+                    outputAnalyser.getByteFrequencyData(dataArray);
+                    // Calculate average audio level
+                    const startFreq = Math.floor(dataArray.length * 0.05);
+                    const endFreq = Math.floor(dataArray.length * 0.4);
+                    const relevantData = dataArray.slice(startFreq, endFreq);
+                    let sum = 0;
+                    for (let i = 0; i < relevantData.length; i++) {
+                        sum += relevantData[i];
+                    }
+                    const avgLevel = sum / relevantData.length / 255;
+                    // Update silence detection with hysteresis
+                    if (avgLevel < silenceThreshold) {
+                        silenceCountRef.current++;
+                    }
+                    else {
+                        silenceCountRef.current = 0;
+                    }
+                    const isListening = silenceCountRef.current >= silenceFramesRequired;
+                    // Smoothly transition listening blend
+                    if (isListening) {
+                        listeningBlendRef.current = Math.min(1, listeningBlendRef.current + transitionSpeed);
+                    }
+                    else {
+                        listeningBlendRef.current = Math.max(0, listeningBlendRef.current - transitionSpeed * 2);
+                    }
+                    if (mode === "static") {
+                        const newBars = [];
+                        // Generate speaking bars from audio data
+                        const speakingBars = [];
+                        for (let i = halfCount - 1; i >= 0; i--) {
+                            const dataIndex = Math.floor((i / halfCount) * relevantData.length);
+                            const value = Math.min(1, (relevantData[dataIndex] / 255) * sensitivity);
+                            speakingBars.push(Math.max(0.05, value));
+                        }
+                        for (let i = 0; i < halfCount; i++) {
+                            const dataIndex = Math.floor((i / halfCount) * relevantData.length);
+                            const value = Math.min(1, (relevantData[dataIndex] / 255) * sensitivity);
+                            speakingBars.push(Math.max(0.05, value));
+                        }
+                        // Generate listening pattern (U-shape modulated by mic input)
+                        listeningTimeRef.current += 0.05;
+                        // Get mic input level for modulation
+                        let micLevel = 0;
+                        if (inputAnalyser) {
+                            const inputData = new Uint8Array(inputAnalyser.frequencyBinCount);
+                            inputAnalyser.getByteFrequencyData(inputData);
+                            let inputSum = 0;
+                            for (let j = 0; j < inputData.length; j++) {
+                                inputSum += inputData[j];
+                            }
+                            micLevel = inputSum / inputData.length / 255;
+                        }
+                        // Modulation: mic input drives the pulse, with subtle idle animation
+                        const idleBreath = 0.6 + Math.sin(listeningTimeRef.current * 0.8) * 0.1;
+                        const micBoost = micLevel * 1.5; // amplify mic response
+                        const breathe = Math.min(1, idleBreath + micBoost);
+                        const secondaryBreath = micLevel * 0.2;
+                        for (let i = 0; i < barCount; i++) {
+                            // Distance from center (0 at center, 1 at edges)
+                            const distFromCenter = Math.abs(i - halfCount) / halfCount;
+                            // U-shape: higher at edges, lower at center
+                            // Use smoothstep for nice curve
+                            const t = distFromCenter;
+                            const uShape = t * t * (3 - 2 * t); // smoothstep
+                            // Listening value: base + u-shape, with mic-driven breathing
+                            const listeningValue = (0.05 + uShape * 0.55) * breathe + secondaryBreath * uShape;
+                            // Blend between speaking and listening
+                            const blend = listeningBlendRef.current;
+                            const speakingValue = speakingBars[i] || 0.05;
+                            const blendedValue = speakingValue * (1 - blend) + listeningValue * blend;
+                            newBars.push(Math.max(0.05, Math.min(1, blendedValue)));
+                        }
+                        staticBarsRef.current = newBars;
+                        lastActiveDataRef.current = newBars;
+                    }
+                    else {
+                        // Scrolling mode - original behavior
+                        const average = (sum / relevantData.length / 255) * sensitivity;
+                        historyRef.current.push(Math.min(1, Math.max(0.05, average)));
+                        lastActiveDataRef.current = [...historyRef.current];
+                        if (historyRef.current.length > historySize) {
+                            historyRef.current.shift();
+                        }
+                    }
+                    needsRedrawRef.current = true;
+                }
+                else if (active) {
+                    // No analyser but active - show listening pattern
+                    listeningBlendRef.current = Math.min(1, listeningBlendRef.current + transitionSpeed);
+                    listeningTimeRef.current += 0.05;
+                    // Get mic input level for modulation
+                    let micLevel = 0;
+                    if (inputAnalyser) {
+                        const inputData = new Uint8Array(inputAnalyser.frequencyBinCount);
+                        inputAnalyser.getByteFrequencyData(inputData);
+                        let inputSum = 0;
+                        for (let j = 0; j < inputData.length; j++) {
+                            inputSum += inputData[j];
+                        }
+                        micLevel = inputSum / inputData.length / 255;
+                    }
+                    const idleBreath = 0.6 + Math.sin(listeningTimeRef.current * 0.8) * 0.1;
+                    const micBoost = micLevel * 1.5;
+                    const breathe = Math.min(1, idleBreath + micBoost);
+                    const secondaryBreath = micLevel * 0.2;
+                    const newBars = [];
+                    for (let i = 0; i < barCount; i++) {
+                        const distFromCenter = Math.abs(i - halfCount) / halfCount;
+                        const t = distFromCenter;
+                        const uShape = t * t * (3 - 2 * t);
+                        const listeningValue = (0.05 + uShape * 0.55) * breathe + secondaryBreath * uShape;
+                        newBars.push(Math.max(0.05, Math.min(1, listeningValue)));
+                    }
+                    staticBarsRef.current = newBars;
+                    needsRedrawRef.current = true;
+                }
+            }
+            // Only redraw if needed
+            if (!needsRedrawRef.current && !active) {
+                rafId = requestAnimationFrame(animate);
+                return;
+            }
+            needsRedrawRef.current = active;
+            ctx.clearRect(0, 0, rect.width, rect.height);
+            const computedBarColor = barColor ||
+                (() => {
+                    const style = getComputedStyle(canvas);
+                    // Try to get the computed color value directly
+                    const color = style.color;
+                    return color || "#000";
+                })();
+            const step = barWidth + barGap;
+            const barCount = Math.floor(rect.width / step);
+            const centerY = rect.height / 2;
+            // Draw bars based on mode
+            if (mode === "static") {
+                // Static mode - bars in fixed positions
+                const dataToRender = processing
+                    ? staticBarsRef.current
+                    : active
+                        ? staticBarsRef.current
+                        : staticBarsRef.current.length > 0
+                            ? staticBarsRef.current
+                            : [];
+                for (let i = 0; i < barCount && i < dataToRender.length; i++) {
+                    const value = dataToRender[i] || 0.1;
+                    const x = i * step;
+                    const barHeight = Math.max(baseBarHeight, value * rect.height * 0.8);
+                    const y = centerY - barHeight / 2;
+                    ctx.fillStyle = computedBarColor;
+                    ctx.globalAlpha = 0.4 + value * 0.6;
+                    if (barRadius > 0) {
+                        ctx.beginPath();
+                        ctx.roundRect(x, y, barWidth, barHeight, barRadius);
+                        ctx.fill();
+                    }
+                    else {
+                        ctx.fillRect(x, y, barWidth, barHeight);
+                    }
+                }
+            }
+            else {
+                // Scrolling mode - original behavior
+                for (let i = 0; i < barCount && i < historyRef.current.length; i++) {
+                    const dataIndex = historyRef.current.length - 1 - i;
+                    const value = historyRef.current[dataIndex] || 0.1;
+                    const x = rect.width - (i + 1) * step;
+                    const barHeight = Math.max(baseBarHeight, value * rect.height * 0.8);
+                    const y = centerY - barHeight / 2;
+                    ctx.fillStyle = computedBarColor;
+                    ctx.globalAlpha = 0.4 + value * 0.6;
+                    if (barRadius > 0) {
+                        ctx.beginPath();
+                        ctx.roundRect(x, y, barWidth, barHeight, barRadius);
+                        ctx.fill();
+                    }
+                    else {
+                        ctx.fillRect(x, y, barWidth, barHeight);
+                    }
+                }
+            }
+            // Apply edge fading
+            if (fadeEdges && fadeWidth > 0 && rect.width > 0) {
+                // Cache gradient if width hasn't changed
+                if (!gradientCacheRef.current || lastWidthRef.current !== rect.width) {
+                    const gradient = ctx.createLinearGradient(0, 0, rect.width, 0);
+                    const fadePercent = Math.min(0.3, fadeWidth / rect.width);
+                    // destination-out: removes destination where source alpha is high
+                    // We want: fade edges out, keep center solid
+                    // Left edge: start opaque (1) = remove, fade to transparent (0) = keep
+                    gradient.addColorStop(0, "rgba(255,255,255,1)");
+                    gradient.addColorStop(fadePercent, "rgba(255,255,255,0)");
+                    // Center stays transparent = keep everything
+                    gradient.addColorStop(1 - fadePercent, "rgba(255,255,255,0)");
+                    // Right edge: fade from transparent (0) = keep to opaque (1) = remove
+                    gradient.addColorStop(1, "rgba(255,255,255,1)");
+                    gradientCacheRef.current = gradient;
+                    lastWidthRef.current = rect.width;
+                }
+                ctx.globalCompositeOperation = "destination-out";
+                ctx.fillStyle = gradientCacheRef.current;
+                ctx.fillRect(0, 0, rect.width, rect.height);
+                ctx.globalCompositeOperation = "source-over";
+            }
+            ctx.globalAlpha = 1;
+            rafId = requestAnimationFrame(animate);
+        };
+        rafId = requestAnimationFrame(animate);
+        return () => {
+            if (rafId) {
+                cancelAnimationFrame(rafId);
+            }
+        };
+    }, [
+        audio,
+        active,
+        processing,
+        sensitivity,
+        updateRate,
+        historySize,
+        barWidth,
+        baseBarHeight,
+        barGap,
+        barRadius,
+        barColor,
+        fadeEdges,
+        fadeWidth,
+        mode,
+    ]);
+    return (_jsxs("div", { className: className, ref: containerRef, style: {
+            position: "relative",
+            height: heightStyle,
+            width: "100%",
+            ...style,
+        }, "aria-label": active
+        ? "Live audio waveform"
+        : processing
+            ? "Processing audio"
+            : "Audio waveform idle", role: "img", ...props, children: [!active && !processing && (_jsx("div", { style: {
+                position: "absolute",
+                top: "50%",
+                left: 0,
+                right: 0,
+                transform: "translateY(-50%)",
+                borderTop: "2px dotted rgba(128, 128, 128, 0.2)",
+            } })), _jsx("canvas", { style: { display: "block", height: "100%", width: "100%" }, ref: canvasRef, "aria-hidden": "true" })] }));
+}

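The edge fade at the end of the draw pass is worth calling out: rather than blending bar colors toward a background color, the component erases pixels with a `destination-out` gradient, so the fade works over any background. The same pass, isolated into a standalone sketch (the function name and dimensions are illustrative):

```ts
// Erase the left/right edges of whatever is already drawn on `ctx`.
// With "destination-out", the destination is removed where the source
// alpha is high: alpha 1 at the edges erases, alpha 0 in the middle keeps.
function applyEdgeFade(ctx: CanvasRenderingContext2D, width: number, height: number, fadeWidth: number): void {
    const fadePercent = Math.min(0.3, fadeWidth / width); // cap fade at 30% per side
    const gradient = ctx.createLinearGradient(0, 0, width, 0);
    gradient.addColorStop(0, "rgba(255,255,255,1)");
    gradient.addColorStop(fadePercent, "rgba(255,255,255,0)");
    gradient.addColorStop(1 - fadePercent, "rgba(255,255,255,0)");
    gradient.addColorStop(1, "rgba(255,255,255,1)");
    ctx.globalCompositeOperation = "destination-out";
    ctx.fillStyle = gradient;
    ctx.fillRect(0, 0, width, height);
    ctx.globalCompositeOperation = "source-over"; // restore the default
}
```
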
package/dist/hooks/use-browser-audio.d.ts
@@ -0,0 +1,34 @@
+import { BrowserChannel } from "../lib/browser-channel.js";
+/**
+ * Return value from the useBrowserAudio hook.
+ */
+export interface UseBrowserAudioReturn {
+    /**
+     * Browser audio channel for mic capture and playback.
+     * Pass to useRealtime and LiveWaveform.
+     */
+    channel?: BrowserChannel;
+}
+/**
+ * React hook for managing browser audio resources.
+ *
+ * Creates a BrowserChannel for mic capture and audio playback.
+ * Handles cleanup on unmount and provides a fresh channel after close.
+ *
+ * @example
+ * ```tsx
+ * const { channel } = useBrowserAudio();
+ *
+ * const start = async () => {
+ *   await channel.init(); // request mic, setup audio
+ *   connect(credential);
+ * };
+ *
+ * const stop = () => {
+ *   disconnect();
+ *   channel.close(); // cleanup audio resources
+ * };
+ * ```
+ */
+export declare function useBrowserAudio(): UseBrowserAudioReturn;
+//# sourceMappingURL=use-browser-audio.d.ts.map

package/dist/hooks/use-browser-audio.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"use-browser-audio.d.ts","sourceRoot":"","sources":["../../src/hooks/use-browser-audio.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAEvD;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,OAAO,CAAC,EAAE,cAAc,CAAC;CAC1B;AAED;;;;;;;;;;;;;;;;;;;;GAoBG;AACH,wBAAgB,eAAe,IAAI,qBAAqB,CAavD"}

package/dist/hooks/use-browser-audio.js
@@ -0,0 +1,34 @@
+import { useState, useEffect } from "react";
+import { BrowserChannel } from "../lib/browser-channel.js";
+/**
+ * React hook for managing browser audio resources.
+ *
+ * Creates a BrowserChannel for mic capture and audio playback.
+ * Handles cleanup on unmount and provides a fresh channel after close.
+ *
+ * @example
+ * ```tsx
+ * const { channel } = useBrowserAudio();
+ *
+ * const start = async () => {
+ *   await channel.init(); // request mic, setup audio
+ *   connect(credential);
+ * };
+ *
+ * const stop = () => {
+ *   disconnect();
+ *   channel.close(); // cleanup audio resources
+ * };
+ * ```
+ */
+export function useBrowserAudio() {
+    const [channel, setChannel] = useState();
+    useEffect(() => {
+        const ch = new BrowserChannel();
+        setChannel(ch);
+        return () => {
+            ch.close();
+        };
+    }, []);
+    return { channel };
+}

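Note that `channel` is created inside an effect, so it is `undefined` on the first render — which is why `UseBrowserAudioReturn` declares it optional. A sketch of the guard a consumer would need (component name and export path are assumptions):

```tsx
import { useBrowserAudio } from "@kernl-sdk/react"; // assumed export path

function StartButton() {
    const { channel } = useBrowserAudio();
    const start = async () => {
        if (!channel) return; // first render: the effect has not created it yet
        await channel.init(); // request mic, set up audio
    };
    return <button onClick={start} disabled={!channel}>Start</button>;
}
```
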
package/dist/hooks/use-realtime.d.ts
@@ -0,0 +1,85 @@
+import { RealtimeAgent } from "kernl";
+import type { RealtimeModel, RealtimeChannel, ClientCredential, TransportStatus } from "@kernl-sdk/protocol";
+/**
+ * Credential input that accepts expiresAt as either Date or string.
+ * Derived from ClientCredential to stay in sync.
+ */
+type FlexibleExpiry<T> = T extends {
+    expiresAt: Date;
+} ? Omit<T, "expiresAt"> & {
+    expiresAt: Date | string;
+} : never;
+export type CredentialInput = FlexibleExpiry<ClientCredential>;
+/**
+ * Options for the useRealtime hook.
+ */
+export interface UseRealtimeOptions<TContext> {
+    /**
+     * The realtime model to use.
+     */
+    model: RealtimeModel;
+    /**
+     * Audio I/O channel for mic capture and playback.
+     */
+    channel?: RealtimeChannel;
+    /**
+     * Context passed to tool executions.
+     */
+    ctx?: TContext;
+}
+/**
+ * Return value from the useRealtime hook.
+ */
+export interface UseRealtimeReturn {
+    /**
+     * Current connection status.
+     */
+    status: TransportStatus;
+    /**
+     * Connect to the realtime model with the given credential.
+     */
+    connect: (credential: CredentialInput) => Promise<void>;
+    /**
+     * Disconnect from the realtime model.
+     */
+    disconnect: () => void;
+    /**
+     * Whether audio input is muted.
+     */
+    muted: boolean;
+    /**
+     * Mute audio input.
+     */
+    mute: () => void;
+    /**
+     * Unmute audio input.
+     */
+    unmute: () => void;
+    /**
+     * Send a text message to the model.
+     */
+    sendMessage: (text: string) => void;
+}
+/**
+ * React hook for managing a realtime voice session.
+ *
+ * Handles connection lifecycle, status updates, and cleanup on unmount.
+ *
+ * @example
+ * ```tsx
+ * const { status, connect, disconnect } = useRealtime(agent, {
+ *   model: openai.realtime("gpt-4o-realtime"),
+ *   channel,
+ *   ctx: { setCart },
+ * });
+ *
+ * const start = async () => {
+ *   const { credential } = await fetch("/api/credential").then(r => r.json());
+ *   await channel.init();
+ *   connect(credential);
+ * };
+ * ```
+ */
+export declare function useRealtime<TContext>(agent: RealtimeAgent<TContext>, options: UseRealtimeOptions<TContext>): UseRealtimeReturn;
+export {};
+//# sourceMappingURL=use-realtime.d.ts.map

package/dist/hooks/use-realtime.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"use-realtime.d.ts","sourceRoot":"","sources":["../../src/hooks/use-realtime.ts"],"names":[],"mappings":"AACA,OAAO,EAAmB,aAAa,EAAW,MAAM,OAAO,CAAC;AAChE,OAAO,KAAK,EACV,aAAa,EACb,eAAe,EACf,gBAAgB,EAChB,eAAe,EAChB,MAAM,qBAAqB,CAAC;AAE7B;;;GAGG;AACH,KAAK,cAAc,CAAC,CAAC,IAAI,CAAC,SAAS;IAAE,SAAS,EAAE,IAAI,CAAA;CAAE,GAClD,IAAI,CAAC,CAAC,EAAE,WAAW,CAAC,GAAG;IAAE,SAAS,EAAE,IAAI,GAAG,MAAM,CAAA;CAAE,GACnD,KAAK,CAAC;AAEV,MAAM,MAAM,eAAe,GAAG,cAAc,CAAC,gBAAgB,CAAC,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,kBAAkB,CAAC,QAAQ;IAC1C;;OAEG;IACH,KAAK,EAAE,aAAa,CAAC;IAErB;;OAEG;IACH,OAAO,CAAC,EAAE,eAAe,CAAC;IAE1B;;OAEG;IACH,GAAG,CAAC,EAAE,QAAQ,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,MAAM,EAAE,eAAe,CAAC;IAExB;;OAEG;IACH,OAAO,EAAE,CAAC,UAAU,EAAE,eAAe,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAExD;;OAEG;IACH,UAAU,EAAE,MAAM,IAAI,CAAC;IAEvB;;OAEG;IACH,KAAK,EAAE,OAAO,CAAC;IAEf;;OAEG;IACH,IAAI,EAAE,MAAM,IAAI,CAAC;IAEjB;;OAEG;IACH,MAAM,EAAE,MAAM,IAAI,CAAC;IAEnB;;OAEG;IACH,WAAW,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;CACrC;AAED;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAClC,KAAK,EAAE,aAAa,CAAC,QAAQ,CAAC,EAC9B,OAAO,EAAE,kBAAkB,CAAC,QAAQ,CAAC,GACpC,iBAAiB,CAsEnB"}

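Putting the three exports together, the JSDoc examples above imply a session flow like the following. This is a sketch, not verbatim package docs: the `agent` instance, the `openai` provider import, the `/api/credential` endpoint, the root export path, and the `"connected"` status value are all assumptions.

```tsx
import { LiveWaveform, useBrowserAudio, useRealtime } from "@kernl-sdk/react"; // assumed export path
import { agent } from "./agent";   // hypothetical RealtimeAgent instance
import { openai } from "./model";  // hypothetical provider exposing openai.realtime(...)

export function VoiceSession() {
    const { channel } = useBrowserAudio();
    const { status, connect, disconnect } = useRealtime(agent, {
        model: openai.realtime("gpt-4o-realtime"),
        channel,
    });

    const start = async () => {
        if (!channel) return; // channel is created in an effect
        // Server-minted client credential, per the useRealtime example
        const { credential } = await fetch("/api/credential").then((r) => r.json());
        await channel.init(); // request mic, set up the audio graph
        connect(credential);
    };

    const stop = () => {
        disconnect();
        channel?.close(); // release audio resources
    };

    return (
        <>
            {/* assumes "connected" is one of the TransportStatus values */}
            <LiveWaveform active={status === "connected"} audio={channel ?? null} />
            <button onClick={start}>Start</button>
            <button onClick={stop}>Stop</button>
        </>
    );
}
```
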