@kernl-sdk/react 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,601 @@
1
+ "use client";
2
+
3
+ import { useEffect, useRef, type HTMLAttributes } from "react";
4
+
5
/**
 * Audio source interface for LiveWaveform visualization.
 *
 * Either analyser may be null until the underlying audio pipeline has been
 * initialized; LiveWaveform re-reads both properties on every animation
 * frame, so they may become non-null after the component mounts.
 */
export interface AudioSource {
  /** Analyser for speaker output (model audio). */
  readonly output: AnalyserNode | null;
  /** Analyser for mic input (user audio). */
  readonly input: AnalyserNode | null;
}
14
+
15
export type LiveWaveformProps = HTMLAttributes<HTMLDivElement> & {
  /** Drive bars from live analyser data. Default false. */
  active?: boolean;
  /** When true (and not active), show a synthetic "processing" wave. */
  processing?: boolean;
  /**
   * Audio source for visualization (e.g., BrowserChannel).
   */
  audio?: AudioSource | null;
  /** Width of each bar in CSS pixels. Default 3. */
  barWidth?: number;
  /** Minimum bar height in CSS pixels. Default 4. */
  barHeight?: number;
  /** Gap between adjacent bars in CSS pixels. Default 1. */
  barGap?: number;
  /** Corner radius of each bar; 0 draws plain rectangles. Default 1.5. */
  barRadius?: number;
  /** Bar fill color; defaults to the canvas's computed CSS `color`. */
  barColor?: string;
  /** Fade bars out toward the left/right edges. Default true. */
  fadeEdges?: boolean;
  /** Width of the edge fade in CSS pixels. Default 24. */
  fadeWidth?: number;
  /** Container height; a number is treated as pixels. Default 64. */
  height?: string | number;
  /** Multiplier applied to analyser levels. Default 1. */
  sensitivity?: number;
  /** Max samples kept in "scrolling" mode. Default 60. */
  historySize?: number;
  /** Minimum milliseconds between audio samples while active. Default 30. */
  updateRate?: number;
  /** "static": fixed mirrored bars; "scrolling": history scrolls from the right. */
  mode?: "scrolling" | "static";
};
35
+
36
+ export function LiveWaveform({
37
+ active = false,
38
+ processing = false,
39
+ audio,
40
+ barWidth = 3,
41
+ barGap = 1,
42
+ barRadius = 1.5,
43
+ barColor,
44
+ fadeEdges = true,
45
+ fadeWidth = 24,
46
+ barHeight: baseBarHeight = 4,
47
+ height = 64,
48
+ sensitivity = 1,
49
+ historySize = 60,
50
+ updateRate = 30,
51
+ mode = "static",
52
+ className,
53
+ style,
54
+ ...props
55
+ }: LiveWaveformProps) {
56
+ const canvasRef = useRef<HTMLCanvasElement>(null);
57
+ const containerRef = useRef<HTMLDivElement>(null);
58
+ const historyRef = useRef<number[]>([]);
59
+ const lastUpdateRef = useRef<number>(0);
60
+ const processingAnimationRef = useRef<number | null>(null);
61
+ const lastActiveDataRef = useRef<number[]>([]);
62
+ const transitionProgressRef = useRef(0);
63
+ const staticBarsRef = useRef<number[]>([]);
64
+ const needsRedrawRef = useRef(true);
65
+ const gradientCacheRef = useRef<CanvasGradient | null>(null);
66
+ const lastWidthRef = useRef(0);
67
+
68
+ // Listening state refs
69
+ const listeningBlendRef = useRef(0); // 0 = speaking, 1 = listening
70
+ const listeningTimeRef = useRef(0);
71
+ const silenceCountRef = useRef(0);
72
+
73
+ const heightStyle = typeof height === "number" ? `${height}px` : height;
74
+
75
+ // Handle canvas resizing
76
+ useEffect(() => {
77
+ const canvas = canvasRef.current;
78
+ const container = containerRef.current;
79
+ if (!canvas || !container) return;
80
+
81
+ const resizeObserver = new ResizeObserver(() => {
82
+ const rect = container.getBoundingClientRect();
83
+ const dpr = window.devicePixelRatio || 1;
84
+
85
+ canvas.width = rect.width * dpr;
86
+ canvas.height = rect.height * dpr;
87
+ canvas.style.width = `${rect.width}px`;
88
+ canvas.style.height = `${rect.height}px`;
89
+
90
+ const ctx = canvas.getContext("2d");
91
+ if (ctx) {
92
+ ctx.scale(dpr, dpr);
93
+ }
94
+
95
+ gradientCacheRef.current = null;
96
+ lastWidthRef.current = rect.width;
97
+ needsRedrawRef.current = true;
98
+ });
99
+
100
+ resizeObserver.observe(container);
101
+ return () => resizeObserver.disconnect();
102
+ }, []);
103
+
104
+ useEffect(() => {
105
+ if (processing && !active) {
106
+ let time = 0;
107
+ transitionProgressRef.current = 0;
108
+
109
+ const animateProcessing = () => {
110
+ time += 0.03;
111
+ transitionProgressRef.current = Math.min(
112
+ 1,
113
+ transitionProgressRef.current + 0.02,
114
+ );
115
+
116
+ const processingData = [];
117
+ const barCount = Math.floor(
118
+ (containerRef.current?.getBoundingClientRect().width || 200) /
119
+ (barWidth + barGap),
120
+ );
121
+
122
+ if (mode === "static") {
123
+ const halfCount = Math.floor(barCount / 2);
124
+
125
+ for (let i = 0; i < barCount; i++) {
126
+ const normalizedPosition = (i - halfCount) / halfCount;
127
+ const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4;
128
+
129
+ const wave1 = Math.sin(time * 1.5 + normalizedPosition * 3) * 0.25;
130
+ const wave2 = Math.sin(time * 0.8 - normalizedPosition * 2) * 0.2;
131
+ const wave3 = Math.cos(time * 2 + normalizedPosition) * 0.15;
132
+ const combinedWave = wave1 + wave2 + wave3;
133
+ const processingValue = (0.2 + combinedWave) * centerWeight;
134
+
135
+ let finalValue = processingValue;
136
+ if (
137
+ lastActiveDataRef.current.length > 0 &&
138
+ transitionProgressRef.current < 1
139
+ ) {
140
+ const lastDataIndex = Math.min(
141
+ i,
142
+ lastActiveDataRef.current.length - 1,
143
+ );
144
+ const lastValue = lastActiveDataRef.current[lastDataIndex] || 0;
145
+ finalValue =
146
+ lastValue * (1 - transitionProgressRef.current) +
147
+ processingValue * transitionProgressRef.current;
148
+ }
149
+
150
+ processingData.push(Math.max(0.05, Math.min(1, finalValue)));
151
+ }
152
+ } else {
153
+ for (let i = 0; i < barCount; i++) {
154
+ const normalizedPosition = (i - barCount / 2) / (barCount / 2);
155
+ const centerWeight = 1 - Math.abs(normalizedPosition) * 0.4;
156
+
157
+ const wave1 = Math.sin(time * 1.5 + i * 0.15) * 0.25;
158
+ const wave2 = Math.sin(time * 0.8 - i * 0.1) * 0.2;
159
+ const wave3 = Math.cos(time * 2 + i * 0.05) * 0.15;
160
+ const combinedWave = wave1 + wave2 + wave3;
161
+ const processingValue = (0.2 + combinedWave) * centerWeight;
162
+
163
+ let finalValue = processingValue;
164
+ if (
165
+ lastActiveDataRef.current.length > 0 &&
166
+ transitionProgressRef.current < 1
167
+ ) {
168
+ const lastDataIndex = Math.floor(
169
+ (i / barCount) * lastActiveDataRef.current.length,
170
+ );
171
+ const lastValue = lastActiveDataRef.current[lastDataIndex] || 0;
172
+ finalValue =
173
+ lastValue * (1 - transitionProgressRef.current) +
174
+ processingValue * transitionProgressRef.current;
175
+ }
176
+
177
+ processingData.push(Math.max(0.05, Math.min(1, finalValue)));
178
+ }
179
+ }
180
+
181
+ if (mode === "static") {
182
+ staticBarsRef.current = processingData;
183
+ } else {
184
+ historyRef.current = processingData;
185
+ }
186
+
187
+ needsRedrawRef.current = true;
188
+ processingAnimationRef.current =
189
+ requestAnimationFrame(animateProcessing);
190
+ };
191
+
192
+ animateProcessing();
193
+
194
+ return () => {
195
+ if (processingAnimationRef.current) {
196
+ cancelAnimationFrame(processingAnimationRef.current);
197
+ }
198
+ };
199
+ } else if (!active && !processing) {
200
+ // Reset listening state
201
+ listeningBlendRef.current = 0;
202
+ listeningTimeRef.current = 0;
203
+ silenceCountRef.current = 0;
204
+
205
+ const hasData =
206
+ mode === "static"
207
+ ? staticBarsRef.current.length > 0
208
+ : historyRef.current.length > 0;
209
+
210
+ if (hasData) {
211
+ let fadeProgress = 0;
212
+ const fadeToIdle = () => {
213
+ fadeProgress += 0.03;
214
+ if (fadeProgress < 1) {
215
+ if (mode === "static") {
216
+ staticBarsRef.current = staticBarsRef.current.map(
217
+ (value) => value * (1 - fadeProgress),
218
+ );
219
+ } else {
220
+ historyRef.current = historyRef.current.map(
221
+ (value) => value * (1 - fadeProgress),
222
+ );
223
+ }
224
+ needsRedrawRef.current = true;
225
+ requestAnimationFrame(fadeToIdle);
226
+ } else {
227
+ if (mode === "static") {
228
+ staticBarsRef.current = [];
229
+ } else {
230
+ historyRef.current = [];
231
+ }
232
+ }
233
+ };
234
+ fadeToIdle();
235
+ }
236
+ }
237
+ }, [processing, active, barWidth, barGap, mode]);
238
+
239
+ // Animation loop
240
+ useEffect(() => {
241
+ const canvas = canvasRef.current;
242
+ if (!canvas) return;
243
+
244
+ const ctx = canvas.getContext("2d");
245
+ if (!ctx) return;
246
+
247
+ let rafId: number;
248
+
249
+ const animate = (currentTime: number) => {
250
+ // Read analysers directly each frame (properties populate after init)
251
+ const outputAnalyser = audio?.output;
252
+ const inputAnalyser = audio?.input;
253
+
254
+ // Render waveform
255
+ const rect = canvas.getBoundingClientRect();
256
+
257
+ // Update audio data if active
258
+ if (active && currentTime - lastUpdateRef.current > updateRate) {
259
+ lastUpdateRef.current = currentTime;
260
+
261
+ const barCount = Math.floor(rect.width / (barWidth + barGap));
262
+ const halfCount = Math.floor(barCount / 2);
263
+
264
+ // Threshold for detecting silence (agent not speaking)
265
+ const silenceThreshold = 0.08;
266
+ const silenceFramesRequired = 2;
267
+ const transitionSpeed = 0.18;
268
+
269
+ if (outputAnalyser) {
270
+ const dataArray = new Uint8Array(outputAnalyser.frequencyBinCount);
271
+ outputAnalyser.getByteFrequencyData(dataArray);
272
+
273
+ // Calculate average audio level
274
+ const startFreq = Math.floor(dataArray.length * 0.05);
275
+ const endFreq = Math.floor(dataArray.length * 0.4);
276
+ const relevantData = dataArray.slice(startFreq, endFreq);
277
+
278
+ let sum = 0;
279
+ for (let i = 0; i < relevantData.length; i++) {
280
+ sum += relevantData[i];
281
+ }
282
+ const avgLevel = sum / relevantData.length / 255;
283
+
284
+ // Update silence detection with hysteresis
285
+ if (avgLevel < silenceThreshold) {
286
+ silenceCountRef.current++;
287
+ } else {
288
+ silenceCountRef.current = 0;
289
+ }
290
+
291
+ const isListening = silenceCountRef.current >= silenceFramesRequired;
292
+
293
+ // Smoothly transition listening blend
294
+ if (isListening) {
295
+ listeningBlendRef.current = Math.min(
296
+ 1,
297
+ listeningBlendRef.current + transitionSpeed,
298
+ );
299
+ } else {
300
+ listeningBlendRef.current = Math.max(
301
+ 0,
302
+ listeningBlendRef.current - transitionSpeed * 2,
303
+ );
304
+ }
305
+
306
+ if (mode === "static") {
307
+ const newBars: number[] = [];
308
+
309
+ // Generate speaking bars from audio data
310
+ const speakingBars: number[] = [];
311
+ for (let i = halfCount - 1; i >= 0; i--) {
312
+ const dataIndex = Math.floor(
313
+ (i / halfCount) * relevantData.length,
314
+ );
315
+ const value = Math.min(
316
+ 1,
317
+ (relevantData[dataIndex] / 255) * sensitivity,
318
+ );
319
+ speakingBars.push(Math.max(0.05, value));
320
+ }
321
+ for (let i = 0; i < halfCount; i++) {
322
+ const dataIndex = Math.floor(
323
+ (i / halfCount) * relevantData.length,
324
+ );
325
+ const value = Math.min(
326
+ 1,
327
+ (relevantData[dataIndex] / 255) * sensitivity,
328
+ );
329
+ speakingBars.push(Math.max(0.05, value));
330
+ }
331
+
332
+ // Generate listening pattern (U-shape modulated by mic input)
333
+ listeningTimeRef.current += 0.05;
334
+
335
+ // Get mic input level for modulation
336
+ let micLevel = 0;
337
+ if (inputAnalyser) {
338
+ const inputData = new Uint8Array(
339
+ inputAnalyser.frequencyBinCount,
340
+ );
341
+ inputAnalyser.getByteFrequencyData(inputData);
342
+ let inputSum = 0;
343
+ for (let j = 0; j < inputData.length; j++) {
344
+ inputSum += inputData[j];
345
+ }
346
+ micLevel = inputSum / inputData.length / 255;
347
+ }
348
+
349
+ // Modulation: mic input drives the pulse, with subtle idle animation
350
+ const idleBreath =
351
+ 0.6 + Math.sin(listeningTimeRef.current * 0.8) * 0.1;
352
+ const micBoost = micLevel * 1.5; // amplify mic response
353
+ const breathe = Math.min(1, idleBreath + micBoost);
354
+ const secondaryBreath = micLevel * 0.2;
355
+
356
+ for (let i = 0; i < barCount; i++) {
357
+ // Distance from center (0 at center, 1 at edges)
358
+ const distFromCenter = Math.abs(i - halfCount) / halfCount;
359
+
360
+ // U-shape: higher at edges, lower at center
361
+ // Use smoothstep for nice curve
362
+ const t = distFromCenter;
363
+ const uShape = t * t * (3 - 2 * t); // smoothstep
364
+
365
+ // Listening value: base + u-shape, with mic-driven breathing
366
+ const listeningValue =
367
+ (0.05 + uShape * 0.55) * breathe + secondaryBreath * uShape;
368
+
369
+ // Blend between speaking and listening
370
+ const blend = listeningBlendRef.current;
371
+ const speakingValue = speakingBars[i] || 0.05;
372
+ const blendedValue =
373
+ speakingValue * (1 - blend) + listeningValue * blend;
374
+
375
+ newBars.push(Math.max(0.05, Math.min(1, blendedValue)));
376
+ }
377
+
378
+ staticBarsRef.current = newBars;
379
+ lastActiveDataRef.current = newBars;
380
+ } else {
381
+ // Scrolling mode - original behavior
382
+ const average = (sum / relevantData.length / 255) * sensitivity;
383
+ historyRef.current.push(Math.min(1, Math.max(0.05, average)));
384
+ lastActiveDataRef.current = [...historyRef.current];
385
+
386
+ if (historyRef.current.length > historySize) {
387
+ historyRef.current.shift();
388
+ }
389
+ }
390
+ needsRedrawRef.current = true;
391
+ } else if (active) {
392
+ // No analyser but active - show listening pattern
393
+ listeningBlendRef.current = Math.min(
394
+ 1,
395
+ listeningBlendRef.current + transitionSpeed,
396
+ );
397
+ listeningTimeRef.current += 0.05;
398
+
399
+ // Get mic input level for modulation
400
+ let micLevel = 0;
401
+ if (inputAnalyser) {
402
+ const inputData = new Uint8Array(
403
+ inputAnalyser.frequencyBinCount,
404
+ );
405
+ inputAnalyser.getByteFrequencyData(inputData);
406
+ let inputSum = 0;
407
+ for (let j = 0; j < inputData.length; j++) {
408
+ inputSum += inputData[j];
409
+ }
410
+ micLevel = inputSum / inputData.length / 255;
411
+ }
412
+
413
+ const idleBreath =
414
+ 0.6 + Math.sin(listeningTimeRef.current * 0.8) * 0.1;
415
+ const micBoost = micLevel * 1.5;
416
+ const breathe = Math.min(1, idleBreath + micBoost);
417
+ const secondaryBreath = micLevel * 0.2;
418
+
419
+ const newBars: number[] = [];
420
+ for (let i = 0; i < barCount; i++) {
421
+ const distFromCenter = Math.abs(i - halfCount) / halfCount;
422
+ const t = distFromCenter;
423
+ const uShape = t * t * (3 - 2 * t);
424
+ const listeningValue =
425
+ (0.05 + uShape * 0.55) * breathe + secondaryBreath * uShape;
426
+ newBars.push(Math.max(0.05, Math.min(1, listeningValue)));
427
+ }
428
+
429
+ staticBarsRef.current = newBars;
430
+ needsRedrawRef.current = true;
431
+ }
432
+ }
433
+
434
+ // Only redraw if needed
435
+ if (!needsRedrawRef.current && !active) {
436
+ rafId = requestAnimationFrame(animate);
437
+ return;
438
+ }
439
+
440
+ needsRedrawRef.current = active;
441
+ ctx.clearRect(0, 0, rect.width, rect.height);
442
+
443
+ const computedBarColor =
444
+ barColor ||
445
+ (() => {
446
+ const style = getComputedStyle(canvas);
447
+ // Try to get the computed color value directly
448
+ const color = style.color;
449
+ return color || "#000";
450
+ })();
451
+
452
+ const step = barWidth + barGap;
453
+ const barCount = Math.floor(rect.width / step);
454
+ const centerY = rect.height / 2;
455
+
456
+ // Draw bars based on mode
457
+ if (mode === "static") {
458
+ // Static mode - bars in fixed positions
459
+ const dataToRender = processing
460
+ ? staticBarsRef.current
461
+ : active
462
+ ? staticBarsRef.current
463
+ : staticBarsRef.current.length > 0
464
+ ? staticBarsRef.current
465
+ : [];
466
+
467
+ for (let i = 0; i < barCount && i < dataToRender.length; i++) {
468
+ const value = dataToRender[i] || 0.1;
469
+ const x = i * step;
470
+ const barHeight = Math.max(baseBarHeight, value * rect.height * 0.8);
471
+ const y = centerY - barHeight / 2;
472
+
473
+ ctx.fillStyle = computedBarColor;
474
+ ctx.globalAlpha = 0.4 + value * 0.6;
475
+
476
+ if (barRadius > 0) {
477
+ ctx.beginPath();
478
+ ctx.roundRect(x, y, barWidth, barHeight, barRadius);
479
+ ctx.fill();
480
+ } else {
481
+ ctx.fillRect(x, y, barWidth, barHeight);
482
+ }
483
+ }
484
+ } else {
485
+ // Scrolling mode - original behavior
486
+ for (let i = 0; i < barCount && i < historyRef.current.length; i++) {
487
+ const dataIndex = historyRef.current.length - 1 - i;
488
+ const value = historyRef.current[dataIndex] || 0.1;
489
+ const x = rect.width - (i + 1) * step;
490
+ const barHeight = Math.max(baseBarHeight, value * rect.height * 0.8);
491
+ const y = centerY - barHeight / 2;
492
+
493
+ ctx.fillStyle = computedBarColor;
494
+ ctx.globalAlpha = 0.4 + value * 0.6;
495
+
496
+ if (barRadius > 0) {
497
+ ctx.beginPath();
498
+ ctx.roundRect(x, y, barWidth, barHeight, barRadius);
499
+ ctx.fill();
500
+ } else {
501
+ ctx.fillRect(x, y, barWidth, barHeight);
502
+ }
503
+ }
504
+ }
505
+
506
+ // Apply edge fading
507
+ if (fadeEdges && fadeWidth > 0 && rect.width > 0) {
508
+ // Cache gradient if width hasn't changed
509
+ if (!gradientCacheRef.current || lastWidthRef.current !== rect.width) {
510
+ const gradient = ctx.createLinearGradient(0, 0, rect.width, 0);
511
+ const fadePercent = Math.min(0.3, fadeWidth / rect.width);
512
+
513
+ // destination-out: removes destination where source alpha is high
514
+ // We want: fade edges out, keep center solid
515
+ // Left edge: start opaque (1) = remove, fade to transparent (0) = keep
516
+ gradient.addColorStop(0, "rgba(255,255,255,1)");
517
+ gradient.addColorStop(fadePercent, "rgba(255,255,255,0)");
518
+ // Center stays transparent = keep everything
519
+ gradient.addColorStop(1 - fadePercent, "rgba(255,255,255,0)");
520
+ // Right edge: fade from transparent (0) = keep to opaque (1) = remove
521
+ gradient.addColorStop(1, "rgba(255,255,255,1)");
522
+
523
+ gradientCacheRef.current = gradient;
524
+ lastWidthRef.current = rect.width;
525
+ }
526
+
527
+ ctx.globalCompositeOperation = "destination-out";
528
+ ctx.fillStyle = gradientCacheRef.current;
529
+ ctx.fillRect(0, 0, rect.width, rect.height);
530
+ ctx.globalCompositeOperation = "source-over";
531
+ }
532
+
533
+ ctx.globalAlpha = 1;
534
+
535
+ rafId = requestAnimationFrame(animate);
536
+ };
537
+
538
+ rafId = requestAnimationFrame(animate);
539
+
540
+ return () => {
541
+ if (rafId) {
542
+ cancelAnimationFrame(rafId);
543
+ }
544
+ };
545
+ }, [
546
+ audio,
547
+ active,
548
+ processing,
549
+ sensitivity,
550
+ updateRate,
551
+ historySize,
552
+ barWidth,
553
+ baseBarHeight,
554
+ barGap,
555
+ barRadius,
556
+ barColor,
557
+ fadeEdges,
558
+ fadeWidth,
559
+ mode,
560
+ ]);
561
+
562
+ return (
563
+ <div
564
+ className={className}
565
+ ref={containerRef}
566
+ style={{
567
+ position: "relative",
568
+ height: heightStyle,
569
+ width: "100%",
570
+ ...style,
571
+ }}
572
+ aria-label={
573
+ active
574
+ ? "Live audio waveform"
575
+ : processing
576
+ ? "Processing audio"
577
+ : "Audio waveform idle"
578
+ }
579
+ role="img"
580
+ {...props}
581
+ >
582
+ {!active && !processing && (
583
+ <div
584
+ style={{
585
+ position: "absolute",
586
+ top: "50%",
587
+ left: 0,
588
+ right: 0,
589
+ transform: "translateY(-50%)",
590
+ borderTop: "2px dotted rgba(128, 128, 128, 0.2)",
591
+ }}
592
+ />
593
+ )}
594
+ <canvas
595
+ style={{ display: "block", height: "100%", width: "100%" }}
596
+ ref={canvasRef}
597
+ aria-hidden="true"
598
+ />
599
+ </div>
600
+ );
601
+ }
@@ -0,0 +1,50 @@
1
+ import { useState, useEffect } from "react";
2
+
3
+ import { BrowserChannel } from "@/lib/browser-channel";
4
+
5
/**
 * Return value from the useBrowserAudio hook.
 */
export interface UseBrowserAudioReturn {
  /**
   * Browser audio channel for mic capture and playback.
   * Pass to useRealtime and LiveWaveform.
   *
   * Undefined on the first render, until the hook's mount effect has
   * created the channel.
   */
  channel?: BrowserChannel;
}
15
+
16
+ /**
17
+ * React hook for managing browser audio resources.
18
+ *
19
+ * Creates a BrowserChannel for mic capture and audio playback.
20
+ * Handles cleanup on unmount and provides a fresh channel after close.
21
+ *
22
+ * @example
23
+ * ```tsx
24
+ * const { channel } = useBrowserAudio();
25
+ *
26
+ * const start = async () => {
27
+ * await channel.init(); // request mic, setup audio
28
+ * connect(credential);
29
+ * };
30
+ *
31
+ * const stop = () => {
32
+ * disconnect();
33
+ * channel.close(); // cleanup audio resources
34
+ * };
35
+ * ```
36
+ */
37
+ export function useBrowserAudio(): UseBrowserAudioReturn {
38
+ const [channel, setChannel] = useState<BrowserChannel>();
39
+
40
+ useEffect(() => {
41
+ const ch = new BrowserChannel();
42
+ setChannel(ch);
43
+
44
+ return () => {
45
+ ch.close();
46
+ };
47
+ }, []);
48
+
49
+ return { channel };
50
+ }